query (stringlengths 7-9.55k) | document (stringlengths 10-363k) | metadata (dict) | negatives (sequencelengths 0-101) | negative_scores (sequencelengths 0-101) | document_score (stringlengths 3-10) | document_rank (stringclasses, 102 values) |
---|---|---|---|---|---|---|
Gets the index of the next line that will be fetched by gets, starting at 0. | def lineno
@source.lineno
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compute_line_index\n scanner = StringScanner.new(@string)\n result = [0] # first line starts at 0\n while scanner.scan_until(/\\n/)\n result << scanner.pos\n end\n @line_index = result\n end",
"def get_next_line(file, index)\n index.value += 1\n return get_current_line(file, index)\nend",
"def line_index(pos=pos())\n p = n = 0\n string.each_line do |line|\n p += line.length\n return n if p >= pos\n n += 1\n end\n 0\n end",
"def line_index(pos=pos)\n p = n = 0\n string.each_line do |line|\n p += line.length\n return n if p >= pos\n n += 1\n end\n 0\n end",
"def current_line_number\n # Not using string[@previous_pos..best_pos].count('\\n') because it is slower\n strscan = ::StringScanner.new(string)\n strscan.pos = @previous_pos\n old_pos = pos + 1\n @previous_line_number += 1 while strscan.skip_until(/\\n/) && strscan.pos <= old_pos\n\n @previous_pos = (eos? ? pos : pos + 1)\n @previous_line_number\n end",
"def read\n @index += 1\n line = @last ? @last : @lines[@index]\n @last = nil\n line\n end",
"def from_line_index\n from_line - 1\n end",
"def input_line\n lines - 1\n end",
"def line_index(num)\n\t\tif num == 0\n\t\t\treturn -1\n\t\telsif num <= @line_indices.length\n\t\t\treturn @line_indices[num - 1]\n\t\telsif num == @line_indices.length + 1\n\t\t\treturn @text.length\n\t\telse\n\t\t\treturn -999 # todo\n\t\tend \n\tend",
"def get_line\n @lines.shift\n end",
"def line_at(io, char)\n read(io)[0..char].count(\"\\n\") + 1\n end",
"def get_current_line(file, index)\n current_line = file[index.value]\n if !current_line.nil?\n current_line.chomp()\n end\n while (!current_line.nil?) && current_line.match(/\\A\\s*\\Z/)\n index.value += 1\n current_line = file[index.value]\n if !current_line.nil?\n current_line.chomp()\n end\n end\n return current_line\nend",
"def rl_beg_of_line(count, key)\r\n @rl_point = 0\r\n 0\r\n end",
"def next_line(input=[])\r\n @tokens = input\r\n @column = -1\r\n @line += 1\r\n end",
"def next\n last? ? nil : locate + 1\n end",
"def line\n @string[0..@index].split(\"\\n\").count\n end",
"def line_number\n\t\t\t@line_index + 1\n\t\tend",
"def line_number\n lines_read.length\n end",
"def line\n\t return -1 if @inputStack.empty? # only if initialize() arg is bogus\n\n\t input = @inputStack[0] # not @inputStack.last\n\t str = input.string[0 .. input.pos]\n\t return str.count(\"\\n\") + 1\n\tend",
"def line\n return 1 unless lexing_context && locator\n locator.line_for_offset(lexing_context[:end_offset])\n end",
"def next\n displacement = @file.gets.try(:chomp).try(:to_f)\n return nil unless displacement\n\n ret = @curr_val\n @curr_val += displacement\n ret\n end",
"def get_line(filename, lineno)\n File.open(filename) do |f|\n f.gets until f.lineno == lineno - 1\n f.gets\n end\n end",
"def line_for_offset(offset)\n if @prev_offset == offset\n # use cache\n return @prev_line\n end\n line_nbr = ary_bsearch_i(line_index, offset)\n if line_nbr\n # cache\n @prev_offset = offset\n @prev_line = line_nbr\n return line_nbr\n end\n # If not found it is after last\n # clear cache\n @prev_offset = @prev_line = nil\n return line_index.size\n end",
"def current_line\n\n seek_line_head\n @file.gets\n end",
"def getLine(theLines, theIndex)\n\n\treturn theLines.fetch(theIndex, EMPTY_LINE);\n\nend",
"def next_line\n return nil if @input.eof?\n line, ch, @pos, @line_no = '', '', 0, @line_no + 1\n until ch == \"\\r\" || ch == \"\\n\" || ch.nil?\n ch = @input.getc\n line += ch unless ch.nil?\n end\n if ch == \"\\r\"\n ch = @input.getc\n @input.ungetc(ch) unless ch == \"\\n\" || ch.nil?\n end\n line.chomp << \"\\n\"\n end",
"def get_next\n r = nil\n iterator_lock do\n if @iterator <= @last_id\n r = get(@iterator)\n @iterator += 1\n @iterator_file.write(\"#{@iterator.to_s(36)}\\n\")\n r\n else\n nil\n end\n end\n end",
"def determine_line_number(scanner)\n scanner.string[0, scanner.pos].count(\"\\n\")\n end",
"def get_next\n return if eof?\n\n @buffer << gets if @buffer.empty?\n\n until @io.eof? && @io_buf.empty?\n line = gets\n next unless line\n\n if @parser.start_new?(line) || @buffer.empty?\n @buffer << line\n break\n else\n @buffer.last << line\n end\n end\n\n return if @buffer.empty?\n @parser.parse(@buffer.slice!(0)) || self.get_next\n end",
"def next_match char\n data = get_content\n row = focussed_index + 1\n row.upto(data.length-1) do |ix|\n val = data[ix].chomp rescue return # 2010-01-05 15:28 crashed on trueclass\n #if val[0,1] == char #and val != currval\n if val[0,1].casecmp(char) == 0 #AND VAL != CURRval\n return ix\n end\n end\n row = focussed_index - 1\n 0.upto(row) do |ix|\n val = data[ix].chomp\n #if val[0,1] == char #and val != currval\n if val[0,1].casecmp(char) == 0 #and val != currval\n return ix\n end\n end\n return -1\n end",
"def line_for_offset(offset)\n if line_nbr = line_index.index {|x| x > offset}\n return line_nbr\n end\n # If not found it is after last\n return line_index.size\n end",
"def next_match char\n data = get_content\n row = focussed_index\n currval = data[row].chomp\n row.upto(data.length-1) do |ix|\n val = data[ix].chomp\n if val[0,1] == char and val != currval\n return ix\n end\n end\n 0.upto(row) do |ix|\n val = data[ix].chomp\n if val[0,1] == char and val != currval\n return ix\n end\n end\n return -1\n end",
"def next_line\n $stdin.tty? ?\n Readline.readline('> ', true) :\n $stdin.gets\n end",
"def get_line_from_file(path, line)\n\tresult = nil\n\tFile.open(path, \"r\") do |f|\n \t\twhile line > 0\n \t\t\tline -= 1\n \t\t\tresult = f.gets\n \t\tend\n \tend\n\treturn result\nend",
"def get_number(lines)\n lines.shift.to_i\n end",
"def next\n\t\tlines.shift\n\tend",
"def current_line\n @lines[@current_index]\n end",
"def line_offset(line_index)\n return unless line_exists?(line_index)\n line_index == 0 ? 0 : @line_lengths[0..(line_index - 1)].sum\n end",
"def line_offset(pos=pos())\n p = 0\n string.each_line do |line|\n len = line.length\n return (pos - p) if p + len >= pos\n p += len\n end\n 0\n end",
"def line_offset(pos=pos)\n p = 0\n string.each_line do |line|\n len = line.length\n return (pos - p) if p + len >= pos\n p += len\n end\n 0\n end",
"def read_next()\n return nil if @at_end\n\n begin\n pos = @marker.position\n\n if @marker.character == ?\\n\n pos.line += 1\n pos.column = 0\n end\n\n @marker.character = @reader.next\n @marker.source_index += 1\n pos.column += 1\n rescue StopIteration\n @at_end = true\n @marker.character = nil\n end\n\n @marker.character\n end",
"def line_index()\n end",
"def line_number(pos=pos)\n line_index(pos) + 1\n end",
"def rl_end_of_line(count, key)\r\n @rl_point = @rl_end\r\n 0\r\n end",
"def current_line\n STDIN.each_line.next\nend",
"def count_line_offset(index)\n Array(@spinners[index..-1]).reduce(0) do |acc, spinner|\n if spinner.spinning? || spinner.done?\n acc += 1\n end\n acc\n end\n end",
"def io_buf_line\n index = @io_buf.index \"\\n\"\n return unless index\n\n @io_buf.slice!(0..index)\n end",
"def line_at(char)\n return nil unless char\n text[0..char].count(\"\\n\") + 1\n end",
"def next\n next? ? @current + 1 : nil\n end",
"def line(number)\n lines[number - 1]\n end",
"def line_number(pos=pos())\n line_index(pos) + 1\n end",
"def line_number\n $curwin.cursor.first\n end",
"def get_next\n return if eof?\n\n @buffer << @io.gets if @buffer.empty?\n\n until @io.eof?\n line = @io.gets\n next unless line\n\n if @parser.start_new?(line) || @buffer.empty?\n @buffer << line\n break\n else\n @buffer.last << line\n end\n end\n\n return if @buffer.empty?\n @parser.parse(@buffer.slice!(0)) || self.get_next\n end",
"def linenumber\n return @cmd.nil? ? 0 : @cmd.linenumber\n end",
"def see_next\n if @index >= @normalized_numbers.length\n return -1\n else\n return @normalized_numbers[@index]\n end\n end",
"def next\n\t\t@ibuf = (@ibuf+1).modulo(@nbuf)\n\t\t@buffers[@ibuf]\n\tend",
"def next_skip_line_number\n first = self.next\n first.type == :integer ? self.next : first\n end",
"def lines_offset\n @starting_line - 1\n end",
"def nextChar\n if getChar == \"\\n\"\n @line += 1\n @column = @lastLineSize\n end\n @index += 1\n @column += 1\n end",
"def current_line\n # get the actual offset\n start = @buffer.get_iter_at_offset(@@offset)\n\n # get the command\n line = @buffer.get_text(start, @buffer.end_iter)\n\n return line\n end",
"def line\n buffer[line_number]\n end",
"def get_line_number(index)\n if @list_of_events.length.zero?\n puts 'list is empty'\n else\n temp_event = @list_of_events[index]\n temp_event.line_number\n end\n end",
"def next\n peek.tap { @position += 1 }\n end",
"def find_next_length_index(data)\n last_is_zero = false\n data.each_with_index do |num, i|\n if num.zero?\n if last_is_zero\n return i + 1\n else\n last_is_zero = true\n end\n else\n last_is_zero = false\n end\n end\n end",
"def next_words_on_error\n if n = @src.rest.index(\"\\n\")\n @src.rest[0 .. (n-1)]\n else\n @src.rest\n end\nend",
"def journey_station(line)\n print \"Station?\\t\"\n return line.index(gets.chomp)\nend",
"def get_line_pos(pos)\n lpos = @line_ends.bsearch_index { |x, _| x >= pos }\n return lpos\n end",
"def next_char\n temp = @file.eof? ? '^' : @file.getc\n @line += temp;\n @columnCount+=1\n return temp\n end",
"def nextline\n # handle last file\n\n if @curr.nil?\n @curr=File.open @files.next rescue nil\n end\n\n return if @curr.nil? # Still Nil?\n\n # handle EOF\n if (line = @curr.gets).nil?\n @curr=nil\n :eof\n else\n line.chomp\n end\n end",
"def line(n)\n @lines[n]\n end",
"def get_next_offset(current)\n context.select_value(sanitize([\n %Q{\n SELECT MAX(id)\n FROM (\n SELECT id FROM #{source}\n WHERE id > ?\n ORDER BY id\n LIMIT ?\n ) AS t\n },\n current,\n chunk_size\n ])).tap do |next_offset|\n logger.debug(\"-> Next offset is: #{next_offset}\")\n end\n end",
"def readline(line_no)\n index_offset = 4 + (line_no * 4)\n data_offset = read_int(index_offset)\n\n @data.seek(data_offset)\n @data.readline.chomp\n end",
"def read_next_id\n id = nil\n list = current_list\n\n if @co_index <= list.size\n id = list[@co_index - 1][:id]\n end\n\n @co_index += 1\n\n id\n end",
"def read_by_line_number line_number\n uri = ''\n File.open('storage.db','r') do |f|\n count = f.gets.to_i\n raise 'URI unaccesible' if count < line_number\n while line_number >0\n uri = f.gets\n line_number -= 1\n end\n end\n uri\n end",
"def next()\n @index += 1\n @string[@index...(@index+1)]\n end",
"def move_to_next_line()\r\n while @seek_ptr < @len && @fileBuf.at(@seek_ptr) != \"\\n\"\r\n @seek_ptr = @seek_ptr + 1\r\n end\r\n end",
"def each_reverse_newline_index(chunk)\n while(nl_index = chunk.rindex(\"\\n\", (nl_index||chunk.size)-1))\n yield(nl_index)\n end\n nl_index\nend",
"def num_lines() @line_indices.length + 1 end",
"def rl_get_next_history(count, key)\r\n if (count < 0)\r\n return (rl_get_previous_history(-count, key))\r\n end\r\n if (count == 0)\r\n return 0\r\n end\r\n rl_maybe_replace_line()\r\n\r\n # either not saved by rl_newline or at end of line, so set appropriately.\r\n if (@_rl_history_saved_point == -1 && (@rl_point!=0 || @rl_end!=0))\r\n @_rl_history_saved_point = (@rl_point == @rl_end) ? -1 : @rl_point\r\n end\r\n temp = nil\r\n while (count>0)\r\n temp = next_history()\r\n if temp.nil?\r\n break\r\n end\r\n count -= 1\r\n end\r\n\r\n if temp.nil?\r\n rl_maybe_unsave_line()\r\n else\r\n rl_replace_from_history(temp, 0)\r\n _rl_history_set_point()\r\n end\r\n 0\r\n end",
"def line_pos(row)\n (row > 0 ? src.split(\"\\n\")[0..(row - 1)].inject(0) { |pos, line| pos + line.length + 1 } : 0)\n end",
"def next_cursor\n @result[:next_cursor]\n end",
"def each_line_with_index\n c = 0\n each_line do |l|\n yield l, c\n c += 1\n end\n end",
"def readline(file, line_number)\n current_line = 0\n File.open(file).each do |line_text|\n current_line += 1\n return line_text.strip if current_line == line_number\n end\n end",
"def readline(file, line_number)\n current_line = 0\n File.open(file).each do |line_text|\n current_line += 1\n return line_text.strip if current_line == line_number\n end\n end",
"def last_line(src)\n if n = src.rindex(\"\\n\")\n src[(n+1) .. -1]\n else\n src\n end\nend",
"def index\n history = File.open(@current)\n line = history.readline.rstrip\n line =~ /(\\d+)$/ # get the index\n $1.to_i\n end",
"def line\n\t\treturn @file_data[@current_line]\n\tend",
"def next_line\r\n while true\r\n if (@lexemes[@pointer] != nil && @lexemes[@pointer] != '@')\r\n @pointer = @pointer.next\r\n else\r\n break\r\n end\r\n end\r\n end",
"def next_match str\n first = nil\n ## content can be string or Chunkline, so we had to write <tt>index</tt> for this.\n ## =~ does not give an error, but it does not work.\n @content.each_with_index do |line, ix|\n col = line.index str\n if col\n first ||= [ ix, col ]\n if ix > @current_index\n return [ix, col]\n end\n end\n end\n return first\n end",
"def next_chunk_number\n\t\t\treturn -1\n\t\tend",
"def get_next_link(index, toc_lines)\n\n max_index = toc_lines.length - 1\n (index..max_index).each do |i|\n search = toc_lines[i].scan(/\\[[^\\(]*\\]\\(([^\\[]*?)\\)/m)\n unless search.empty?\n return search[0][0]\n end\n end\n\n # if you get here no match was found\n ''\n end",
"def gets seps=[\"\\n\"]\n if @writecount > @readcount\n line = \"\"\n begin\n c = @buffer[@readcount]\n if !c.nil?\n @readcount = @readcount+1\n line += c\n if seps.include?(c)\n break\n end\n end\n end while !@eof\n line\n elsif @eof\n \"EOF\\n\"\n else\n nil\n end\n end",
"def next\n at(position + 1)\n end",
"def next_edge_index\n # starting at zero\n @next_edge_index ||= 0\n\n @next_edge_index += 1\n\n (@next_edge_index - 1)\n end",
"def _next_num_index(string_copy)\n for i in 0..string_copy.length\n unless string_copy[i] =~ /\\d/\n if i == 0\n return nil\n end\n if string_copy[i] =~ /\\^/\n return nil\n else\n return i - 1\n end\n end\n end\n return string_copy.length - 1\n end",
"def next\n fil_header[:next]\n end",
"def _increment_file_line()\n\t\t@reading_line += 1\n\tend",
"def next_offset\n [all_contents.size + 1, offset + limit].min\n end",
"def read_line_number(filename, number)\n return nil if number < 1\n line = File.readlines(filename)[number-1]\n line ? line.chomp : nil\n end",
"def NextChar\r\n\t\t@Line.slice!(0, 1)\r\n\tend",
"def advance\n @current += 1 unless at_end?\n return previous\n end"
] | [
"0.7382335",
"0.7222228",
"0.69436646",
"0.68897915",
"0.685765",
"0.6754902",
"0.6691898",
"0.66674817",
"0.6660438",
"0.6552503",
"0.6537625",
"0.653273",
"0.6455776",
"0.6442657",
"0.64307666",
"0.64033633",
"0.6379832",
"0.63787097",
"0.6352932",
"0.6321197",
"0.6280669",
"0.62664306",
"0.626013",
"0.6258893",
"0.62522346",
"0.6251256",
"0.6245974",
"0.6235296",
"0.62084156",
"0.62028587",
"0.6198585",
"0.61874545",
"0.6182085",
"0.61766475",
"0.61725885",
"0.6162754",
"0.61592335",
"0.61526066",
"0.61167574",
"0.6102149",
"0.6077087",
"0.60637516",
"0.6061941",
"0.60511124",
"0.604165",
"0.60412306",
"0.60261023",
"0.59905154",
"0.599051",
"0.597987",
"0.59764",
"0.59552807",
"0.5947577",
"0.5939151",
"0.5920671",
"0.59123737",
"0.5911801",
"0.5879323",
"0.586868",
"0.58621293",
"0.58617663",
"0.5851708",
"0.5829972",
"0.5827606",
"0.5822594",
"0.58206385",
"0.58115",
"0.5787496",
"0.576707",
"0.5766633",
"0.5758041",
"0.5737082",
"0.57287085",
"0.5725137",
"0.5724604",
"0.57137597",
"0.57099116",
"0.5709077",
"0.5706348",
"0.56863415",
"0.56791437",
"0.5678853",
"0.5674144",
"0.5674144",
"0.567276",
"0.5654741",
"0.56532305",
"0.5649299",
"0.56425405",
"0.56415194",
"0.5637899",
"0.5637693",
"0.5615822",
"0.561268",
"0.56125057",
"0.5611487",
"0.56109715",
"0.56096613",
"0.56077474",
"0.5605258",
"0.5602305"
] | 0.0 | -1 |
Gets the next scanline from the image. | def gets
@source.gets
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def each_scanline(&block)\n for line_no in 0...height do\n scanline = pixels[width * line_no, width]\n yield(scanline)\n end\n end",
"def next_tile_img\n next_tile_img = @current_tile_indx + 1\n case @current_move\n when Direction::Up\n next_tile_img = 9 if next_tile_img > 11\n when Direction::Down\n next_tile_img = 0 if next_tile_img > 2\n when Direction::Left\n next_tile_img = 3 if next_tile_img > 5\n when Direction::Right\n next_tile_img = 6 if next_tile_img > 8\n end\n @current_tile_indx = next_tile_img\n end",
"def find_first_nonwhite_row image_being_searched\n raise \"deprecated\"\n m_begin \"find_first_nonwhite_row\"\n img = get_image(image)\n rows = img.rows\n cols = img.columns\n rows.times do |row|\n scanline = img.export_pixels(0, row, cols, 1, map=\"I\")\n scanline.each {|x| return row if x != White}\n end\n m_end \"find_first_nonwhite_row\"\n nil\n end",
"def raster_line\n @cr[4] << 1 | @cr[3][7]\n end",
"def next_row\n @rows+=1\n set RGhost::Cursor.next_row\n end",
"def next_row\n @next_row ||= _read_row\n end",
"def each_pixel()\n return to_enum :each_pixel unless block_given?\n @image.height.times do |y|\n @image.width.times do |x|\n yield pixel(x,y)\n end\n end\n self\n end",
"def next\n Photo.find_by_id(next_id) if next?\n end",
"def next\n self.offset(1)\n end",
"def next\n @current_image = @files[@files.index(@current_image) + 1] || @files.last\n end",
"def get_next_line(file, index)\n index.value += 1\n return get_current_line(file, index)\nend",
"def next\n at(position + 1)\n end",
"def next_row\n observation_matrix.observation_matrix_rows.where('position > ?', position).order(:position).first\n end",
"def next()\n x = @arr[@i]\n @i += 1\n x\n end",
"def next_cell\n if has_adjacent_cell?(@current_x, @current_y)\n @current_x += 1\n get_cell(@current_x-1, @current_y)\n elsif has_adjacent_row?(@current_x, @current_y)\n @current_y +=1\n @current_x = 0\n get_cell(@current_x, @current_y-1)\n elsif get_cell(@current_x, @current_y)\n @current_x += 1\n get_cell(@current_x-1, @current_y)\n else\n nil\n end\n end",
"def next\n\t\tlines.shift\n\tend",
"def find_last_white_row image\n raise \"deprecated\"\n m_begin \"find_last_white_row\"\n img = get_image(image)\n img.rows.times do |row|\n row_excerpt = img.excerpt(0, row, img.columns, 1)\n row_dot = row_excerpt.scale(1, 1)\n color = row_dot.get_pixels(0,0,1,1)[0].red\n return row-1 if color <= White * 0.75\n end\n m_end \"find_last_white_row\"\n nil\n end",
"def next_scan \n lastPos = self.pos\n while (!self.eof )\n l = self.readline\n break if l =~ /\\<scan|\\<spectrum\\s/\n lastPos = self.pos\n end\n self.pos = lastPos\n get_scan_from_curr_pos\n end",
"def read_row\n return if end_of_file?\n\n @current_row = @next_row || _read_row\n @line_number = current_row.nil? ? nil : @line_number + 1\n @next_row = nil\n\n current_row\n end",
"def next_row\n raise(\"no more rows available\") unless next?\n self.last_row = self.rows[self.current_row_index]\n self.current_row_index += 1\n\n if self.current_row_index == self.rows.size\n self.rows = nil\n end\n\n self.last_row\n end",
"def next_cursor\n @result[:next_cursor]\n end",
"def draw_horizontal(row: 0, start_col: 0, end_col: 0, color: \"R\")\n image.map!.with_index do |row_array, row_num|\n if row == row_num\n row_array.map.with_index do |pixel, col_num|\n if col_num >= start_col && col_num <= end_col\n color\n else\n pixel\n end\n end\n else\n row_array\n end\n end\n save_state\n self\n end",
"def reconstructed_scanline\n @reconstructed_scanline = []\n @filtered.each do |byte|\n recon = case @type\n when 0\n none(byte)\n when 1\n sub(byte)\n when 2\n up(byte)\n when 3\n average(byte)\n when 4\n paeth(byte)\n end\n\n @reconstructed_scanline << (recon % 256)\n @position += 1\n end\n\n @reconstructed_scanline\n end",
"def get_line\n @lines.shift\n end",
"def each\n (0...height).each do |y|\n row = (0...width).each_with_object(Array.new(width)) do |x, r|\n r[x] = get_pixel(x, y)\n end\n yield row\n end\n end",
"def next_row\n @current_row += 1\n end",
"def next\n layout.rows[number+1] unless last?\n end",
"def next\n begin\n self.band.levels[band.levels.index(self) + 1]\n rescue\n return nil\n end\n end",
"def get_next\n r = nil\n iterator_lock do\n if @iterator <= @last_id\n r = get(@iterator)\n @iterator += 1\n @iterator_file.write(\"#{@iterator.to_s(36)}\\n\")\n r\n else\n nil\n end\n end\n end",
"def next()\n result = current\n @index += 1\n @got_next_element = false\n @next_element = nil\n result\n end",
"def each_pixel\n get_pixels(0, 0, columns, rows).each_with_index do |p, n|\n yield(p, n % columns, n / columns)\n end\n self\n end",
"def next\n return @page + 1\n end",
"def [](x, y)\n validate_x_y_coords(x, y)\n @pixels[y - 1][x - 1]\n end",
"def next_result(obs, imgs, img, inc = 1)\n next_idx = imgs.index(img) + inc\n # if there's another img for this obs, just get it\n if next_idx.between?(0, imgs.count - 1)\n img = imgs[next_idx]\n # else get the next obs, if there is one\n elsif (obs = obs_with_imgs_ids[obs_with_imgs_ids.index(obs) + inc])\n # get its list of image ids\n imgs = Observation.find(obs).images.order(id: :asc).map(&:id)\n # get first or last image in the list\n # depending on whether were going forward or back through results\n img = inc.positive? ? imgs.first : imgs.last\n end\n [obs, imgs, img]\n end",
"def pixels(i)\r\n i.get_pixels \r\n end",
"def next\n next? ? @current + 1 : nil\n end",
"def next\n\t\t@ibuf = (@ibuf+1).modulo(@nbuf)\n\t\t@buffers[@ibuf]\n\tend",
"def next_skip_line_number\n first = self.next\n first.type == :integer ? self.next : first\n end",
"def scan(img)\n num_images = 0\n img_height,img_width = img.length,img.width\n coords = coordinates(img.data)\n\n for x in 0..(@size - img_width-1) do\n for y in 0..(@size - img_height-1) do\n match = coords.map do |x1,y1|\n break [false] if @data[x+x1][y+y1] != @char\n true\n end\n num_images+=1 if match.all?\n end\n end\n num_images\n end",
"def encode_png_pixels_to_scanline_indexed_8bit(pixels)\n pixels.map { |p| encoding_palette.index(p) }.pack(\"xC#{width}\")\n end",
"def row\n return nil unless @row\n if @row < 0\n return FFI::NCurses.LINES + @row\n end\n @row\n end",
"def get_next_entry; end",
"def get_next\n\n if (@local_iterator)\n @local_iterator.get_next\n else\n raise \"No iterator defined! Cannot get next element\"\n end\n\n end",
"def read_row radius\n @image.format('png')\n p = ChunkyPNG::Image.from_io(StringIO.new(@image.to_blob))\n a = (0...p.width).to_a.map{ |x| p[x, radius] }\n end",
"def next\n if @state == :start && @scanner.eos?\n return nil\n else\n scan_next_token\n end\n end",
"def row_of( y )\n @top_line + y\n end",
"def row_of( y )\n @top_line + y\n end",
"def get_next()\n\t\t\treturn @next\n\t\tend",
"def getNext\n return @touches.shift\n end",
"def next()\n @array[@k+=1]\n end",
"def a\n offset = @position - @pixel_width\n return 0x00 if offset < 0\n\n @reconstructed_scanline[offset]\n end",
"def next\n peek.tap { @position += 1 }\n end",
"def peek_next()\n return nil if @at_end\n\n begin\n @reader.peek\n rescue StopIteration\n nil\n end\n end",
"def next\n @pointer += 1\n end",
"def next\r\n next_values[0]\r\n end",
"def right_edge\n x = width-1\n @image.get(x,0,x,height).pixels\n end",
"def next\n last? ? nil : locate + 1\n end",
"def get x, y\n @pixels[pixel(x,y)]\n end",
"def find_last_row color, inimage, direction= :top_to_bottom\n m_begin \"find_last_row\"\n img = get_image(inimage)\n img.rows.times do |row|\n if direction == :top_to_bottom \n row_excerpt = img.excerpt(0, row, img.columns, 1)\n elsif direction == :bottom_to_top\n row_excerpt = img.excerpt(0, img.rows-row, img.columns, 1)\n else\n raise \"invalid direction symbol in find_last_row\"\n end\n row_dot = row_excerpt.scale(1, 1)\n pixelcolor = row_dot.get_pixels(0,0,1,1)[0].red\n if color == :white\n return row-1 if pixelcolor <= White * 0.75\n elsif color == :black\n return row-1 if pixelcolor >= White * 0.75\n else\n raise \"invalid color symbol in find_last_row\"\n end \n end\n nil\n ensure\n m_end \"find_last_row\"\n end",
"def next\n fil_header[:next]\n end",
"def advance\n @current += 1 unless at_end?\n return previous\n end",
"def next\n @offset = get_current_offset + get_count\n get_results @current_projection\n end",
"def row(y)\n i = y - 1\n self.cells.where(y: i)\n end",
"def item_at_head\n\t\t\t@head.next\n\t\tend",
"def next\n ptr = C.LLVMGetNextInstruction(self)\n LLVM::Instruction.from_ptr(ptr) unless ptr.null?\n end",
"def next_line\n return nil if @input.eof?\n line, ch, @pos, @line_no = '', '', 0, @line_no + 1\n until ch == \"\\r\" || ch == \"\\n\" || ch.nil?\n ch = @input.getc\n line += ch unless ch.nil?\n end\n if ch == \"\\r\"\n ch = @input.getc\n @input.ungetc(ch) unless ch == \"\\n\" || ch.nil?\n end\n line.chomp << \"\\n\"\n end",
"def rl_beg_of_line(count, key)\r\n @rl_point = 0\r\n 0\r\n end",
"def next\n if (r = @lastresult[\"queries\"][\"nextPage\"])\n @previous = @lastresult.clone\n\n return search_by_query_hash(r.first)\n else\n return nil\n end\n end",
"def next_hex\n c = self.next\n return nil if c.nil?\n return c if hex?(c)\n nil\n end",
"def get_pixel(x, y)\n ::GD2::GD2FFI.send(:gdImageGetPixel, @image_ptr, x.to_i, y.to_i)\n end",
"def next_page\n set RGhost::Cursor.next_page\n end",
"def read_next()\n return nil if @at_end\n\n begin\n pos = @marker.position\n\n if @marker.character == ?\\n\n pos.line += 1\n pos.column = 0\n end\n\n @marker.character = @reader.next\n @marker.source_index += 1\n pos.column += 1\n rescue StopIteration\n @at_end = true\n @marker.character = nil\n end\n\n @marker.character\n end",
"def next_item\n return nil if @link == nil\n link.kernel.select {|item| item.rule == @rule}.first\n end",
"def each_line(sub_area)\n byte_width_minus_one = sub_area.w * pixel_byte_size - 1\n stride = line_byte_size\n offset = byte_offset(sub_area.loc) - stride\n\n sub_area.h.times do\n offset += stride\n range = offset .. (offset + byte_width_minus_one)\n yield range\n end\n end",
"def gets\n return nil if @lineno >= @height\n sample = (@lineno * @source.height / @height.to_f).floor\n @lineno += 1\n Interpolation.nearest(get_buf(sample), @width, components)\n end",
"def current_line\n @lines[@current_index]\n end",
"def next\n next_row = @next_node.next()\n while next_row != nil\n # return row if it matches predicate\n return next_row if @predicate.call(next_row)\n # otherwise, keep calling next\n next_row = @next_node.next()\n end\n return nil\n end",
"def gets\n return nil if @lineno >= @height\n sample = @lineno * @source.height / @height.to_f\n sample_i = sample.to_i\n ty = sample - sample_i\n @lineno += 1\n get_buf(sample_i)\n\n Interpolation.bilinear(@buf1, @buf2, @width, ty, components)\n end",
"def next\n block.instructions[index+1] || (block.next ? block.next.instructions.first : nil)\n end",
"def next!\r\n @cur = @cache[@idx+=1]\r\n end",
"def adjacent_row(x,y)\n if has_adjacent_row?(x,y)\n [x, y+1]\n else\n nil\n end\n end",
"def row(y)\n src.xsize.times.map { |x| src[x, y] }\n end",
"def get_row_from_index(index)\n (index - 1) / 8 + 1\n end",
"def next(pointer); end",
"def next(pointer); end",
"def get_next_coords(direction)\n x, y = get_head_coords[:x], get_head_coords[:y]\n case direction\n when 'S'\n y += 1\n when 'N'\n y -= 1\n when 'E'\n x += 1\n when 'W'\n x -= 1\n end\n return x, y\n end",
"def rowAt(row)\n self.tiles[row.to_i * N_COLS, N_COLS]\n end",
"def get_next()\n return @next_node\n end",
"def left_edge\n @image.get(0,0,1,height).pixels\n end",
"def get_next_block\n @store.get_block_by_prev_hash(@hash)\n end",
"def next\n @opened && @cur_record_index < @num_records ? _read_next_record : nil\n end",
"def next_i\n raise \"implement in subclass\"\n end",
"def get_row_from_index(index)\n return (index - 1)/8 + 1\n end",
"def sub_horizontal_line(start, length)\n offset = byte_offset start\n data[offset .. (offset + (length*pixel_byte_size)-1)]\n end",
"def next\n\t\tself + 1\n\tend",
"def next\n @source.next\n @source.trace\n end",
"def next_line\n CSI + 'E' + column(1)\n end",
"def horizontal_grid_line(index)\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.GraphComponentState_horizontal_grid_line(@handle.ptr, index)\n result\n end",
"def next\n @history_idx -= 1 if @history_idx > -1\n current\n end",
"def next\n\t\t@skip = (@skip || 0) + get_resultcount\n\t\t@result = nil\n\t\treturn self\n\tend",
"def nextline\n # handle last file\n\n if @curr.nil?\n @curr=File.open @files.next rescue nil\n end\n\n return if @curr.nil? # Still Nil?\n\n # handle EOF\n if (line = @curr.gets).nil?\n @curr=nil\n :eof\n else\n line.chomp\n end\n end"
] | [
"0.6727522",
"0.5699969",
"0.5695894",
"0.56392694",
"0.56215143",
"0.5598582",
"0.5414653",
"0.53608847",
"0.5341764",
"0.5335011",
"0.5294246",
"0.5280069",
"0.5276989",
"0.52690667",
"0.52583206",
"0.52565295",
"0.52398974",
"0.5227226",
"0.5226742",
"0.51903397",
"0.51595324",
"0.5155409",
"0.514014",
"0.51308787",
"0.5124625",
"0.5103672",
"0.5099047",
"0.5041712",
"0.50227195",
"0.5020985",
"0.49968323",
"0.49908796",
"0.49900642",
"0.49848604",
"0.498283",
"0.4977773",
"0.49654418",
"0.49611896",
"0.49587512",
"0.49586806",
"0.49507448",
"0.49267465",
"0.4923575",
"0.48947793",
"0.48943844",
"0.4883287",
"0.4883287",
"0.4867791",
"0.4864975",
"0.4863895",
"0.48529235",
"0.48500007",
"0.4845269",
"0.48409775",
"0.48318094",
"0.48289445",
"0.48268005",
"0.48146552",
"0.47913793",
"0.4791265",
"0.47822428",
"0.4764917",
"0.4762721",
"0.47571135",
"0.47385156",
"0.47279063",
"0.47208807",
"0.47104537",
"0.4699266",
"0.469754",
"0.4694946",
"0.468251",
"0.46732116",
"0.4670671",
"0.46590808",
"0.4653804",
"0.46507812",
"0.46492139",
"0.46429816",
"0.46317872",
"0.4630456",
"0.4622688",
"0.46202528",
"0.4619146",
"0.4619146",
"0.4615965",
"0.4614862",
"0.4602568",
"0.4602474",
"0.45967588",
"0.45822546",
"0.4582249",
"0.4580565",
"0.45726806",
"0.45680672",
"0.45666826",
"0.45595703",
"0.455529",
"0.45528603",
"0.45456508",
"0.4536872"
] | 0.0 | -1 |
Setup the database migrations | def copy_migrations
[
'create_file_view_stats.rb',
'create_file_download_stats.rb'
].each do |file|
better_migration_template file
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def migrate\n run_migrations pending_migrations, :up\n end",
"def run\n load_migrations\n @migrations.each do |mig_class, version|\n mig_class.up\n # Add it to the schema_migrations table as well\n # This will fail if auto-migrations is only and always used,\n # as the schema_migrations table will not exist.\n SchemaMigration.find_or_create_by_version(version) rescue nil\n end\n end",
"def migrations\n rake 'admin:install:migrations'\n rake 'db:migrate SCOPE=admin'\n end",
"def install_migrations\n migrations = [\n \"create_blogelator_posts.rb\",\n \"create_blogelator_authors.rb\",\n \"create_blogelator_tags.rb\",\n \"create_blogelator_posts_tags.rb\",\n \"create_blogelator_posts_posts.rb\"\n ]\n migration_path = \"db/migrate\"\n migrations.each do |file|\n migration_template \"#{migration_path}/#{file}\", \"#{migration_path}/#{file}\"\n end\n end",
"def setup\n setup_test_database\n drop_and_create_schema_migrations_table\n end",
"def initial_setup\n CORE.each { |c| c.auto_migrate! }\n end",
"def do_migrations\n migration_path = File.join(\"generators\", \"talia\", \"templates\", \"migrations\")\n ActiveRecord::Migrator.migrate(migration_path, ENV[\"VERSION\"] ? ENV[\"VERSION\"].to_i : nil )\n end",
"def migrate_database\n RFlow.logger.debug 'Applying default migrations to config database'\n migrations_path = File.join(File.dirname(__FILE__), 'configuration', 'migrations')\n ActiveRecord::Migration.verbose = false\n ActiveRecord::Migrator.migrate migrations_path\n end",
"def install_migrations\n rake(\"maintenance_tasks:install:migrations\")\n rake(\"db:migrate\")\n end",
"def setup options = {}\n options.to_options!\n chroot do\n generate_migration options\n migrate options\n end\n end",
"def create_tables!\n migrate(:up)\n end",
"def migration_railties; end",
"def migration_railties; end",
"def run_active_record_migrations!\n ActiveRecord::Migration.verbose = false\n ActiveRecord::Migrator.migrate([\"test/fixtures/migrate\"])\n end",
"def install_migrations\n\t\tsay_status :copying, \"migrations\"\n\t\trake 'railties:install:migrations'\n\tend",
"def migration\n end",
"def migrate!\n connect! unless connected?\n Sequel.extension :migration\n Sequel::Migrator.run(db, File.join(__dir__, \"../../db/migrations\"), table: schema_table)\n end",
"def migrate\n ActiveRecord::Migrator.migrate(File.join(db_dir, \"migrate\"))\n end",
"def migrations\n raise(ArgumentError, \"Can't set migrations while using :version option\") if @using_deprecated_version_setting\n yield\n end",
"def setup\n begin\n create_campaign_table_if_not_exist\n seed_data\n rescue Exception => e\n raise \"Database setup failed with error #{e}\"\n ensure\n @connection.close\n end\n end",
"def migrate!\n Migrator.migrate(name)\n end",
"def migrate!\n Migrator.migrate(name)\n end",
"def install_migrations\n puts \"Copying over Cadenero migrations...\"\n Dir.chdir(Rails.root) do\n `rake cadenero:install:migrations`\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def install_sequence\n stop\n \n backup_database\n install_pre_hook\n pre_migrate_database\n copy_files\n freeze_rails\n create_default_config_files\n fix_permissions\n create_directories\n create_initial_database\n set_initial_port_number\n expand_template_files\n \n migrate\n install_post_hook\n save\n \n run_rails_tests\n \n start\n end",
"def dbmigrate!\n ActiveRecord::Base.establish_connection(PuppetHerald.database.spec)\n ActiveRecord::Migrator.up 'db/migrate'\n ActiveRecord::Base.clear_active_connections!\n nil\n end",
"def apply\n migration.up\n end",
"def migrate!\n @logger.fine('Dropping schema...')\n\n migrate(0) # migrate to version 0.\n migrate # migrate to latest version.\n end",
"def generate_migrations\n versions = []\n versions << generate_migration(\"create_users\", <<-EOF\nHanami::Model.migration do\n change do\n create_table :users do\n primary_key :id\n column :name, String\n end\n end\nend\nEOF\n)\n\n versions << generate_migration(\"add_age_to_users\", <<-EOF\nHanami::Model.migration do\n change do\n add_column :users, :age, Integer\n end\nend\nEOF\n)\n versions\n end",
"def migrate(version = nil)\n @logger.fine('Running test migrations...')\n super(File.join(Automation::FRAMEWORK_ROOT, Automation::FET_DIR, 'test/database/migrations'), version)\n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def migrate\n DataMapper.auto_migrate!\n end",
"def auto_migrate!\n AutoMigrator.auto_migrate(name)\n end",
"def auto_migrate!\n AutoMigrator.auto_migrate(name)\n end",
"def up\n ActiveRecord::Base.transaction do\n migrate_cause\n migrate_messages\n end\n end",
"def migrations\n @migrations ||= {}\n end",
"def up\n end",
"def run_migrations\n unless options[\"no-migrate\"]\n puts \"Running rake db:migrate\"\n `rake db:migrate`\n end\n end",
"def generate!\n ::ActiveRecord::Base.establish_connection 'production'\n Schemaless::MigrationsGenerator.new(all_tables).invoke_all\n end",
"def run_before_migrate_setup\n Chef::Log.info 'No before migrate setup defined.'\n end",
"def generate_migrations\n Dir[File.join(source_paths, 'db', 'migrate', '*.rb')].each do |file|\n migration_filepath = file.match(/\\A.+\\/(db\\/migrate\\/.+)\\Z/i)[1]\n raw_migration_filename = file.match(/\\d+\\_(.+)\\Z/i)[1] \n migration_template migration_filepath, \"db/migrate/#{raw_migration_filename}\" \n end\n end",
"def copy_migrations\n # Can't get this any more DRY, because we need this order.\n better_migration_template \"create_searches.rb\"\n better_migration_template \"create_bookmarks.rb\"\n better_migration_template \"remove_editable_fields_from_bookmarks.rb\"\n better_migration_template \"add_user_types_to_bookmarks_searches.rb\"\n end",
"def run_migrations(migrations)\n migrations.each do |direction, version_or_filenames|\n Array.wrap(version_or_filenames).each do |version_or_filename|\n /^(?<version>\\d{3,})/ =~ File.basename(version_or_filename)\n ActiveRecord::Migrator.run(direction, ActiveRecord::Migrator.migrations_path, version.to_i)\n end if version_or_filenames\n end\n if ActiveRecord::Base.schema_format == :ruby\n File.open(ENV['SCHEMA'] || \"#{Rails.root}/db/schema.rb\", 'w') do |file|\n ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)\n end\n end\n #TODO unload migraion classes\n end",
"def add_migrations\n \tmigrations = Dir.glob(SocialFramework::Engine.config.paths[\"db/migrate\"].first + \"/*\")\n\n if options[:migrations]\n options[:migrations].each do |migrate|\n file = \"social_framework_#{migrate.pluralize}.rb\"\n file = migrations.select { |m| m.include?(file) }.first\n unless file.nil? or file.empty?\n file_name = file.split(\"/\").last\n copy_file file, \"db/migrate/#{file_name}\"\n else\n puts \"Could not find migration: '#{migrate}'\"\n end\n end\n else\n migrations.each do |migrate|\n file = migrate.split(\"/\").last \n copy_file migrate, \"db/migrate/#{file}\"\n end\n end\n end",
"def run_migrations(migrations)\n migrations.each do |direction, version_or_filenames|\n Array.wrap(version_or_filenames).each do |version_or_filename|\n version = File.basename(version_or_filename)[/\\d{3,}/]\n\n if defined? ActiveRecord::MigrationContext # >= 5.2\n ActiveRecord::Base.connection.migration_context.run(direction, version.to_i)\n else\n ActiveRecord::Migrator.run(direction, ActiveRecord::Migrator.migrations_paths, version.to_i)\n end\n end if version_or_filenames\n end\n if ActiveRecord::Base.schema_format == :ruby\n File.open(ENV['SCHEMA'] || \"#{Rails.root}/db/schema.rb\", 'w') do |file|\n ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)\n end\n end\n #TODO unload migraion classes\n end",
"def migrate\n raise NotImplementedError\n end",
"def install_migrations\n rake \"platforms_core:install:migrations\"\n end",
"def pre_migrate_database\n old_schema_version = get_schema_version\n new_schema_version = File.read(File.join(source_directory,'db','schema_version')).to_i\n \n return unless old_schema_version > 0\n \n # Are we downgrading?\n if old_schema_version > new_schema_version\n message \"Downgrading schema from #{old_schema_version} to #{new_schema_version}\"\n \n in_directory install_directory do\n unless system(\"rake -s migrate VERSION=#{new_schema_version}\")\n raise InstallFailed, \"Downgrade migrating from #{old_schema_version} to #{new_schema_version} failed.\"\n end\n end\n end\n end",
"def copy_migrations\n rake \"sipity:install:migrations\"\n end",
"def setup\n clear_db\n\n @db = Wgit::Database.new\n end",
"def up(&block)\n migration.up = block\n end",
"def auto_migrate!\n DataMapper.auto_migrate!(name)\n end",
"def reset_migrations!\n @migrations = nil\n @migrate_to = nil\n Neo4j::Transaction.run do\n Neo4j.ref_node[:_db_version] = nil\n end\n end",
"def migrations\n @migrations ||= begin\n paths = Dir[\"#{migrations_path}/*.rb\"]\n migrations = paths.map { |path| MigrationProxy.new(path) }\n migrations.sort\n end\n end",
"def setup_db\n return unless File.exist?(\"#{Jets.root}/config/database.yml\")\n\n db_configs = Jets.application.config.database\n # DatabaseTasks.database_configuration for db:create db:migrate tasks\n # Documented in DatabaseTasks that this is the right way to set it when\n # using ActiveRecord rake tasks outside of Rails.\n ActiveRecord::Tasks::DatabaseTasks.database_configuration = db_configs\n\n current_config = db_configs[Jets.env]\n if current_config.blank?\n abort(\"ERROR: config/database.yml exists but no environment section configured for #{Jets.env}\")\n end\n # Using ActiveRecord rake tasks outside of Rails, so we need to set up the\n # db connection ourselves\n ActiveRecord::Base.establish_connection(current_config)\n end",
"def reset_migrations!\n @migrations = nil\n @migrate_to = nil\n Neo4j::Transaction.run do\n migration_meta_node[:_db_version] = nil\n end\n end",
"def create_migration_files\n source_dir = 'db/migrate/country_domain'\n destination_dir = 'db/migrate'\n\n migration_template(\n \"#{source_dir}/0001_create_countries.rb\",\n \"#{destination_dir}/create_countries.rb\"\n )\n\n migration_template(\n \"#{source_dir}/0002_create_administrative_level_types.rb\",\n \"#{destination_dir}/create_administrative_level_types.rb\"\n )\n\n migration_template(\n \"#{source_dir}/0003_create_administrative_divisions.rb\",\n \"#{destination_dir}/create_administrative_divisions.rb\"\n )\n\n migration_template(\n \"#{source_dir}/0004_create_postal_codes.rb\",\n \"#{destination_dir}/create_postal_codes.rb\"\n )\n\n migration_template(\n \"#{source_dir}/0005_create_administrative_street_names.rb\",\n \"#{destination_dir}/create_administrative_street_names.rb\"\n )\n\n migration_template(\n \"#{source_dir}/0006_create_administrative_street_numbers.rb\",\n \"#{destination_dir}/create_administrative_street_numbers.rb\"\n )\n end",
"def run_local_migrations()\n setup_local_environment\n # Runs migrations against the local database.\n common = Common.new\n Dir.chdir('db') do\n common.run_inline %W{./run-migrations.sh main}\n end\n Dir.chdir('db-cdr/generate-cdr') do\n common.run_inline %W{./init-new-cdr-db.sh --cdr-db-name cdr}\n end\n common.run_inline %W{gradle :loadConfig -Pconfig_key=main -Pconfig_file=config/config_local.json}\n common.run_inline %W{gradle :loadConfig -Pconfig_key=cdrBigQuerySchema -Pconfig_file=config/cdm/cdm_5_2.json}\n common.run_inline %W{gradle :loadConfig -Pconfig_key=featuredWorkspaces -Pconfig_file=config/featured_workspaces_local.json}\n common.run_inline %W{gradle :updateCdrConfig -PappArgs=['config/cdr_config_local.json',false]}\nend",
"def copy_migrations\n rake 'hyrax:install:migrations'\n end",
"def migrate options = {}\n options.to_options!\n chroot do\n util.spawn \"rake RAILS_ENV=#{ Bj.rails_env } db:migrate\", options\n end\n end",
"def copy_migrations\n [\n \"acts_as_follower_migration.rb\",\n \"add_social_to_users.rb\",\n \"add_ldap_attrs_to_user.rb\",\n \"add_avatars_to_users.rb\",\n \"add_groups_to_users.rb\",\n \"create_local_authorities.rb\",\n \"create_trophies.rb\",\n 'add_linkedin_to_users.rb',\n 'create_tinymce_assets.rb',\n 'create_content_blocks.rb',\n 'create_featured_works.rb',\n 'add_external_key_to_content_blocks.rb'\n ].each do |file|\n better_migration_template file\n end\n end",
"def setup_db\n #Supress annoying Schema creation output when tests run\n old_stdout = $stdout\n $stdout = StringIO.new\n \n ActiveRecord::Schema.define(:version => 1) do\n create_table :people do |t|\n t.column :username, :string\n t.column :hair_color, :string\n end\n \n create_table :coffee_mugs do |t|\n t.column :person_id, :integer\n end\n \n create_table :roles do |t|\n t.column :name, :string\n end\n \n create_table :people_roles do |t|\n t.column :role_id, :integer\n t.column :person_id, :integer\n end\n \n create_table :notebooks do |t|\n t.column :title, :string\n t.column :owner_id, :integer\n t.column :ghost_writer_id, :integer\n end\n \n create_table :update_permissions do |t|\n t.column :updater_id, :integer\n t.column :notebook_id, :integer\n end\n \n create_table :pages do |t|\n t.column :number, :integer\n t.column :notebook_id, :integer\n end\n \n create_table :margin_notes do |t|\n t.column :content, :string\n t.column :page_id, :integer\n end\n \n create_table :coffee_stains do |t|\n t.column :opacity, :integer\n t.column :page_id, :integer\n end\n \n create_table :words do |t|\n t.column :text, :string\n t.column :page_id, :integer\n end\n end\n \n #Re-enable stdout\n $stdout = old_stdout\nend",
"def generate\n if Rails.version < '4'\n migration_template('rails3_migration',\n \"#{db_migrate_path}/create_db_poller.rb\")\n else\n migration_template('migration',\n \"#{db_migrate_path}/create_db_poller.rb\")\n end\n end",
"def copy_migrations\n # Can't get this any more DRY, because we need this order.\n %w{acts_as_follower_migration.rb\tadd_social_to_users.rb\t\tcreate_single_use_links.rb\tadd_ldap_attrs_to_user.rb\nadd_avatars_to_users.rb\t\tcreate_checksum_audit_logs.rb\tcreate_version_committers.rb\nadd_groups_to_users.rb\t\tcreate_local_authorities.rb\tcreate_trophies.rb}.each do |f|\n better_migration_template f\n end\n end",
"def run_migrations\n return unless pending_migrations?\n callback(:run_migrations) do\n notify(:run_migrations)\n heroku.run_migrations\n end\n end",
"def setup_databases\n postgres_user = app_name\n postgres_pass = SecureRandom.urlsafe_base64\n postgres_port = find_open_port\n redis_port = find_open_port\n\n add_env \"REDIS_URL\",\n \"redis://localhost:#{redis_port}\"\n\n add_env \"DATABASE_URL\",\n \"postgres:///#{postgres_user}:#{postgres_pass}@localhost:#{postgres_port}\",\n skip_secrets: true\n\n template \"database.yml\",\n \"#{app_name}/config/database.yml\",\n force: true\n\n template \"docker-compose.yml\",\n \"#{app_name}/docker-compose.yml\",\n postgres_user: postgres_user,\n postgres_pass: postgres_pass,\n postgres_port: postgres_port,\n redis_port: redis_port\n end",
"def auto_upgrade!\n AutoMigrator.auto_upgrade(name)\n end",
"def auto_upgrade!\n AutoMigrator.auto_upgrade(name)\n end",
"def migrate\n with_maintenance do\n backup if backup?\n run_migration\n restart\n end\n end",
"def run_db_migrate_rake_task(rollback = false)\n run_custom_build_steps :before_database_migrations\n\n old_schema_version = 0\n old_schema_version = @metadata.read(schema_version_cache).chomp.to_i if @metadata.exists?(schema_version_cache)\n rollback = true if old_schema_version > schema_version\n old_schema_version = @metadata.read(rollback_schema_version_cache).chomp if @metadata.exists?(rollback_schema_version_cache) && rollback\n return true if schema_same_since?(old_schema_version)\n\n instrument \"rails3.run_db_migrate_rake_task\" do\n log(\"db_migrate\") do\n\n migrate = rake.task(\"db:migrate\")\n migrate = rake.task(\"db:rollback\") if rollback\n\n return true unless migrate.is_defined?\n\n if ENV['FORCE_DATABASE_MIGRATIONS']\n topic(\"Forcing database migrations.\")\n else\n topic(\"Running database migrations\") unless rollback\n topic(\"Rolling back database to version #{old_schema_version}\") if rollback\n end\n\n if user_env_hash.empty?\n default_env = {\n \"DATABASE_URL\" => ENV[\"DATABASE_URL\"] || default_database_url\n }\n else\n default_env = {\n \"DATABASE_URL\" => default_database_url\n }\n end\n\n default_env['VERSION'] = old_schema_version if rollback\n\n cache.load migrations_cache if rollback # we need the newer migrations to be able to rollback\n\n migrate.invoke(env: default_env.merge(user_env_hash).merge(\"RAILS_ENV\" => \"migrations\"))\n\n if migrate.success?\n log \"db_migrate\", :status => \"success\"\n puts \"Database migrations completed (#{\"%.2f\" % migrate.time}s)\" unless rollback\n puts \"Database rollback completed (#{\"%.2f\" % migrate.time}s)\" if rollback\n\n FileUtils.mkdir_p(heroku_metadata)\n @metadata.write(rollback_schema_version_cache, old_schema_version, false)\n @metadata.write(schema_version_cache, schema_version, false) unless rollback\n @metadata.write(schema_version_cache, old_schema_version, false) if rollback\n @metadata.save\n\n cache.store migrations_cache\n\n run_custom_build_steps :after_database_migrations\n else\n log \"db_migrate\", :status => \"failure\"\n error \"Database migrations failed.\" unless rollback\n error \"Database rollback failed.\" if rollback\n end\n end\n end\n end",
"def setup_database\n Hanami::Model.load!\n end",
"def migrate\n db.create_table? table_name do\n primary_key :id\n String :ptype\n String :v0\n String :v1\n String :v2\n String :v3\n String :v4\n String :v5\n end\n end",
"def up\n # Use self.class:: so constants are resolved in subclasses instead of this class.\n self.class::COLUMNS.each do |column|\n change_column_null(self.class::TABLE_NAME, column, false)\n end\n end",
"def run_migrations(manifest)\n\n # run bootstrap before user migrations to prepare database\n run_bootstrap\n\n # loop through the manifest, executing migrations in turn\n manifest.each_with_index do |migration, index|\n execute_migration(migration.name, migration.filepath)\n end\n\n end",
"def up\n builds_with_artifacts.find_each do |build|\n build.migrate_artifacts!\n end\n end",
"def supports_migrations?\n true\n end",
"def supports_migrations?\n true\n end",
"def auto_migrate!(repository_name = nil)\n auto_migrate_down!(repository_name)\n auto_migrate_up!(repository_name)\n end",
"def migrate\n puts \"Migrating your database\"\n version = `ls db/migrate | wc -l`.to_i\n ActiveRecord::Base.establish_connection(Play.config['db'])\n ActiveRecord::Migrator.migrate(\"#{File.dirname(__FILE__)}/../db/migrate/\", version)\nend",
"def check_schema_migrations\n return if column_family_exists?('schema_migrations')\n say 'Creating schema_migrations column family'\n DatastaxRails::Cql::CreateColumnFamily.new('schema_migrations').primary_key('cf')\n .columns(cf: :text, digest: :text, solrconfig: :text, stopwords: :text).execute\n end",
"def migration\n migration_template 'migration.rb', 'db/migrate/create_seo_landing_pages.rb'\n end",
"def install_migrations\n say_status :copying, \"migrations\"\n silence_stream(STDOUT) do\n silence_warnings { rake 'qe:install:migrations' }\n end\n end",
"def migrate\n maintenance = Heroku::PgMigrate::Maintenance.new(api, app)\n scale_zero = Heroku::PgMigrate::ScaleZero.new(api, app)\n rebind = Heroku::PgMigrate::RebindConfig.new(api, app)\n provision = Heroku::PgMigrate::Provision.new(api, app)\n foi_pgbackups = Heroku::PgMigrate::FindOrInstallPgBackups.new(api, app)\n transfer = Heroku::PgMigrate::Transfer.new(api, app)\n check_shared = Heroku::PgMigrate::CheckShared.new(api, app)\n release_num = Heroku::PgMigrate::ReleaseNumber.new(api, app)\n\n mp = Heroku::PgMigrate::MultiPhase.new()\n mp.enqueue(check_shared)\n mp.enqueue(foi_pgbackups)\n mp.enqueue(provision)\n mp.enqueue(release_num)\n mp.enqueue(maintenance)\n mp.enqueue(scale_zero)\n mp.enqueue(transfer)\n mp.enqueue(rebind)\n\n mp.engage()\n end",
"def supports_migrations?\n true\n end",
"def supports_migrations?\n true\n end",
"def supports_migrations?\n true\n end",
"def migrate_database(app_name, instance_name)\n Dir.chdir RailsPwnerer::Config[app_name, instance_name][:app_path] do\n # now migrate the database\n if File.exist?('Gemfile')\n Kernel.system 'bundle exec rake db:migrate RAILS_ENV=production'\n else\n Kernel.system 'rake db:migrate RAILS_ENV=production'\n end\n end\n end",
"def setup_db\n @database.create_table :merchants do\n primary_key :id\n String :name\n end\n\n @database.create_table :cards do\n primary_key :id\n String :token, :unique => true, :null => false\n Integer :limit, :null => false\n Integer :balance, :null => false\n Integer :velocity_limit\n Integer :velocity_interval\n end\n\n @database.create_table :txns do\n primary_key :id\n Integer :card_id, :null => false\n Integer :merchant_id, :null => false\n Integer :amount, :null => false\n DateTime :created_at, :null => false\n end\n\n @database.create_table :locks do\n String :id, :unique => true, :null => false\n DateTime :created_at\n end\n\n return true\n end",
"def migrate_database\n # Creating the new database\n ActiveRecord::Base.connection.execute(\"CREATE DATABASE `crowdvoice_installation_#{@new_install.name}`\")\n @default_config ||= ActiveRecord::Base.connection.instance_variable_get(\"@config\").dup\n\n # Connect to new database\n # TODO: Fix server name, shouldn't use the crowdvoice_installation prefix\n ActiveRecord::Base.establish_connection(@default_config.dup.update(:database => \"crowdvoice_installation_#{@new_install.name}\"))\n\n #Migrating database\n\n ActiveRecord::Migrator.migrate(\"db/migrate/\")\n @new_user = @old_user.clone\n @new_user.is_admin = true\n @new_user.save(:validate => false)\n @server_install = Installation.create(:email => @new_user.email, :name => \"crowdvoice-installation-#{@new_install.name}\")\n CustomAttribute.create(\n :name => @new_install.name,\n :logo => @new_install.name,\n :twitter => 'http://twitter.com/intent/tweet?source=webclient&text=Tracking+voices+of+protest+-+http%3A%2F%2Fwww.crowdvoice.org',\n :facebook => 'https://www.facebook.com/sharer.php?t=Tracking+voices+of+protest&u=http%3A%2F%2Fwww.crowdvoice.org',\n :title => @new_install.name,\n :message => \"Modify this message on your admin area!\")\n end",
"def migrated_up(migration)\n column_family.insert({\n data: {\n version: migration.version.to_s,\n name: migration.name,\n migrated_at: Time.now.utc,\n },\n })\n end",
"def create_migration_file\n #~ migration_template 'users.rb', 'db/migrate/create_users_table.rb',\n\t\t#~ migration_template 'profiles.rb', 'db/migrate/create_contact_details_table.rb',\n\t\t#~ migration_template 'subjects.rb', 'db/migrate/create_profiles_table.rb',\n\t\t#~ migration_template 'attachments.rb', 'db/migrate/create_attachments_table.rb'\n end",
"def define\n desc \"Generate a migration (don't forget to pass the migration name)\"\n task \"#{@name}:migrations:generate\", [:name] do |t, args|\n raise 'Need a migration name' unless args[:name]\n Enrar::Migration.new(args[:name]).generate!\n end\n\n desc \"Create the db\"\n task \"#{@name}:db:create\" do\n Enrar::DB.new.create!\n end\n\n desc \"Migrate the database (VERBOSE=true)\"\n task \"#{@name}:db:migrate\", [:version] do |t, args|\n Enrar::Migrator.new(args[:version], verbose: ENV['VERBOSE']).migrate!\n end\n self\n end",
"def generate_migrations(m)\n m.migration_template 'acts_as_attention_concept_migration.rb', File.join('db', 'migrate'),\n {:migration_file_name => \"create_attention_concepts\"}\n end",
"def generate_migration(tables)\n return if tables.empty? && @db_tables.empty?\n result.clear\n\n add_line \"Sequel.migration do\"\n indent do\n generate_migration_body(tables)\n end\n add_line \"end\\n\"\n\n result.join(\"\\n\")\n end",
"def up\n delete_queued_jobs(MIGRATION)\n\n requeue_background_migration_jobs_by_range_at_intervals(MIGRATION, DELAY_INTERVAL)\n end",
"def migrate\n # Create the directories\n vpc_dir = \"#{@migration_root}/vpc\"\n policies_dir = \"#{vpc_dir}/policies\"\n route_tables_dir = \"#{vpc_dir}/route-tables\"\n network_acls_dir = \"#{vpc_dir}/network-acls\"\n subnets_dir = \"#{vpc_dir}/subnets\"\n vpcs_dir = \"#{vpc_dir}/vpcs\"\n\n if !Dir.exists?(@migration_root)\n Dir.mkdir(@migration_root)\n end\n if !Dir.exists?(vpc_dir)\n Dir.mkdir(vpc_dir)\n end\n if !Dir.exists?(policies_dir)\n Dir.mkdir(policies_dir)\n end\n if !Dir.exists?(route_tables_dir)\n Dir.mkdir(route_tables_dir)\n end\n if !Dir.exists?(network_acls_dir)\n Dir.mkdir(network_acls_dir)\n end\n if !Dir.exists?(subnets_dir)\n Dir.mkdir(subnets_dir)\n end\n if !Dir.exists?(vpcs_dir)\n Dir.mkdir(vpcs_dir)\n end\n\n # Migrate the different assets\n migrate_policies(policies_dir)\n route_table_names = migrate_route_tables(route_tables_dir)\n network_acl_names = migrate_network_acls(network_acls_dir)\n subnet_names = migrate_subnets(subnets_dir, route_table_names, network_acl_names)\n migrate_vpcs(vpcs_dir, route_table_names, subnet_names, network_acl_names)\n end",
"def before_setup\n Account.connection.drop_table :accounts, if_exists: true\n Account.connection.exec_query(\"CREATE SEQUENCE accounts_id_seq\")\n Account.connection.exec_query(\"\n CREATE TABLE accounts (\n id BIGINT PRIMARY KEY DEFAULT nextval('accounts_id_seq'),\n firm_id bigint,\n firm_name character varying,\n credit_limit integer\n )\n \")\n\n Company.connection.drop_table :companies, if_exists: true\n Company.connection.exec_query(\"CREATE SEQUENCE companies_nonstd_seq\")\n Company.connection.exec_query(\"\n CREATE TABLE companies (\n id BIGINT PRIMARY KEY DEFAULT nextval('companies_nonstd_seq'),\n type character varying,\n firm_id bigint,\n firm_name character varying,\n name character varying,\n client_of bigint,\n rating bigint,\n account_id integer,\n description character varying\n )\n \")\n\n Course.connection.drop_table :courses, if_exists: true\n Course.connection.exec_query(\"CREATE SEQUENCE courses_id_seq\")\n Course.connection.exec_query(\"\n CREATE TABLE courses (\n id INT PRIMARY KEY DEFAULT nextval('courses_id_seq'),\n name character varying,\n college_id integer\n )\n \")\n\n self.class.fixtures :accounts\n self.class.fixtures :companies\n self.class.fixtures :courses\n end",
"def install_hydra_role_management\n generate('roles')\n rake('db:migrate')\n end",
"def upgrade(migrations, context, meta_node)\n migrations.each do |m|\n Neo4j.logger.info \"Running upgrade: #{m}\"\n m.execute_up(context, meta_node)\n end\n end"
] | [
"0.74758285",
"0.7456984",
"0.74459994",
"0.7405735",
"0.7358846",
"0.7246884",
"0.7202893",
"0.7149887",
"0.7090692",
"0.7038769",
"0.70298225",
"0.6974153",
"0.6974153",
"0.6860222",
"0.6855415",
"0.68009853",
"0.6789679",
"0.6785831",
"0.6687629",
"0.6627187",
"0.65969884",
"0.65969175",
"0.6581648",
"0.6576101",
"0.6576101",
"0.6576101",
"0.65502787",
"0.6541352",
"0.6537632",
"0.65344757",
"0.6503198",
"0.64786875",
"0.6477339",
"0.6474751",
"0.6464802",
"0.6464802",
"0.64552265",
"0.6432979",
"0.641437",
"0.640923",
"0.6406718",
"0.6393654",
"0.63614583",
"0.6351147",
"0.63342357",
"0.6252012",
"0.62516665",
"0.6247921",
"0.624321",
"0.6226114",
"0.62234795",
"0.6209111",
"0.62032366",
"0.6202465",
"0.6189521",
"0.61890817",
"0.6172849",
"0.6161806",
"0.6155643",
"0.61528695",
"0.61382854",
"0.61366886",
"0.61226237",
"0.6072594",
"0.6069431",
"0.6064707",
"0.60460764",
"0.60338855",
"0.6024447",
"0.6024447",
"0.60173297",
"0.60030234",
"0.5990212",
"0.5966125",
"0.5948062",
"0.5923989",
"0.591998",
"0.5887805",
"0.5887805",
"0.5884912",
"0.5861765",
"0.58603245",
"0.5858258",
"0.585468",
"0.5853647",
"0.58516777",
"0.58516777",
"0.58516777",
"0.58512855",
"0.58455026",
"0.5843492",
"0.58360195",
"0.58340514",
"0.5827657",
"0.58173287",
"0.58169556",
"0.5808196",
"0.5805272",
"0.5800815",
"0.57898974",
"0.5781427"
] | 0.0 | -1 |
returns array of arrays mapping annotation to value | def annotated_values_list(values_to_split=self.values)
values_to_split.split(VALUES_SPLITTER).map {|e|
k = e.gsub(/\((.*)?\)/, '')
$1 ? [$1, k] : [e,e]
}
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def [] name\n annotations_hash[name.to_sym]\n end",
"def annotations\n form = self.to_json\n annotations = Array.new\n annotations += bc_annotations\n annotations += question_annotations\n return annotations\n end",
"def spreadsheet_annotation_search_fields\n annotations = []\n unless content_blob.nil?\n content_blob.worksheets.each do |ws|\n ws.cell_ranges.each do |cell_range|\n annotations = annotations | cell_range.annotations.collect{|a| a.value.text}\n end\n end\n end\n annotations\n end",
"def spreadsheet_annotation_search_fields\n annotations = []\n if content_blob\n content_blob.worksheets.each do |ws|\n ws.cell_ranges.each do |cell_range|\n annotations |= cell_range.annotations.collect { |a| a.value.text }\n end\n end\n end\n annotations\n end",
"def values_array\n [@name, @expression.value]\n end",
"def cell_annotations\n cells = self.study.all_cells_array\n annot_values = self.concatenate_data_arrays(self.name, 'annotations')\n Hash[cells.zip(annot_values)]\n end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def annotations; end",
"def to_marker_arr(array)\n [array[0].marker, array[1].marker, array[2].marker]\n end",
"def values_array\n @_field_path.map{ |field_| @_values[field_.name] }\n end",
"def put_student_values_in_array\n instance_variables.map do |attribute|\n instance_variable_get(attribute)\n end\n end",
"def cell_annotations\n cells = self.study.all_cells_array\n # replace blank/nil values with default missing label\n annot_values = AnnotationVizService.sanitize_values_array(\n self.concatenate_data_arrays(self.name, 'annotations'), self.annotation_type\n )\n Hash[cells.zip(annot_values)]\n end",
"def annotations(resource_uri = nil)\n load unless loaded?\n if resource_uri.nil?\n @annotations[@uri]\n else\n @annotations[resource_uri] || []\n end\n end",
"def proptextlistarray(name) #:nodoc:\n @properties.select{ |f| f.name? name }.map{ |p| Vpim.decode_text_list(p.value_raw) }.flatten\n end",
"def to_ary\n attributes.map { |attr| attr.value if attr }\n end",
"def annotations_with_attribute(attrib, include_values = false)\n return [] if attrib.blank?\n\n obj_type = self.class.base_class.name\n\n options = {\n joins: :annotation_attribute,\n conditions: { annotatable_type: obj_type,\n annotatable_id: id,\n annotation_attributes: { name: attrib.strip.downcase } },\n order: 'updated_at DESC'\n }\n\n options[:include] = [:value] if include_values\n\n fetch_annotations(options)\n end",
"def propvaluearray(name) #:nodoc:\n @properties.select{ |f| f.name? name }.map{ |p| p.value }\n end",
"def annotations\n @annotations ||= Annotation.new :interval_id => self.id\n end",
"def cell_annotation_names_by_type(type)\n self.cell_annotations.select {|annotation| annotation['type'] == type}.map {|annotation| annotation['name']}\n end",
"def to_a\n\t\tarr = []\n\t\tpredictions.each do |pred|\n\t\t\tarr.push pred.to_a\n\t\tend\n\n\t\tarr\n\tend",
"def values\n entries.map {|e| e.value }\n end",
"def entries\n ary = []\n self.each{|*val|\n # __svalue is an internal method\n ary.push val.__svalue\n }\n ary\n end",
"def values(array)\n array.select{|elt| any?{|name| elt.kind_of?(name)}}.map{|elt| elt.value}\n end",
"def get_attribute_values() \n @attribute_values_flat\n end",
"def attribute_values\n attributes.values\n end",
"def values\n to_a\n end",
"def get_all_annotation page_number\n begin\n \n if @filename == ''\n raise 'filename not specified'\n end\n \n if page_number == ''\n raise 'page number not specified'\n end\n \n total_annotations = self.get_annotations_count(page_number)\n \n all_annotations = Array.new\n \n index = 1\n while index <= total_annotations\n \n all_annotations.push(self.get_annotation(page_number, index))\n \n index+=1\n end\n \n return all_annotations\n \n \n rescue Exception=>e\n print e\n end\n end",
"def available_values\n result = []\n for i in (0 .. @tag.length - 1)\n result << @tag[i].value\n end\n return result\n end",
"def available_values\n result = []\n for i in (0 .. @tag.length - 1)\n result << @tag[i].value\n end\n return result\n end",
"def values\n @navigable_map.values.to_a\n end",
"def guidance_annotations(org: nil, question: nil)\n raise ArgumentError unless question.respond_to?(:id)\n return [] unless hashified_annotations.key?(org)\n\n hashified_annotations[org].select do |annotation|\n (annotation.question_id == question.id) && (annotation.type == 'guidance')\n end\n end",
"def annotations_with_attribute_and_by_source(attrib, source, include_values = false)\n return [] if attrib.blank? || source.nil?\n\n obj_type = self.class.base_class.name\n\n options = {\n joins: :annotation_attribute,\n conditions: { annotatable_type: obj_type,\n annotatable_id: id,\n source_type: source.class.name,\n source_id: source.id,\n annotation_attributes: { name: attrib.strip.downcase } },\n order: 'updated_at DESC'\n }\n\n options[:include] = [:value] if include_values\n\n fetch_annotations(options)\n end",
"def to_a\n result = []\n each do |elem|\n result << elem[:attrs]\n end\n result\n end",
"def values\n []\n end",
"def enumeration_values\n enumeration = @xml.xpath('./xs:restriction/xs:enumeration', {'xs' => 'http://www.w3.org/2001/XMLSchema'})\n if enumeration.length > 0\n return enumeration.map {|elem| [elem.attributes['value'].value, elem.xpath('./xs:annotation/xs:documentation', {'xs' => 'http://www.w3.org/2001/XMLSchema'}).text]}\n else\n raise \"Not an enumeration\"\n end \n end",
"def array_of_values(name_hash)\n name_hash.collect do |key, value|\n value\n end\nend",
"def annotations\n @research_object.annotations(@uri)\n end",
"def annotations\n if @local\n @annotations\n else\n Codec::Data.to_object(Cproton.pn_disposition_annotations(@impl))\n end\n end",
"def project_attribute_array\n self.class.project_attributes.collect do |name|\n [ name.to_sym, send(name) ]\n end\n end",
"def values\n @attrs.values\n end",
"def annotation(*annotations)\n @metadata = @metadata.with_annotation(annotations.flat_map { |annotation|\n annotation.respond_to?(:each) ? annotation.each.to_a : annotation\n })\n end",
"def to_arr\n [red, green, blue]\n end",
"def values\n [@red, @green, @blue]\n end",
"def values\n rows.map{|r| r.value}\n end",
"def cell_metadata_values(metadata_name, metadata_type)\n cell_metadatum = self.cell_metadata.by_name_and_type(metadata_name, metadata_type)\n if cell_metadatum.present?\n cell_metadatum.cell_annotations\n else\n {}\n end\n end",
"def to_a\n attributes.to_a\n end",
"def extract_annotations_from(file, pattern); end",
"def attribute_values\n @columns.map do |column|\n @attributes[column.to_sym]\n end\n end",
"def attribute_values(attributes)\n attributes.map { |attribute| self.send(attribute) }\n end",
"def values\n @values ||= begin\n matches = []\n\n text.scan(VALUE_REGEXP) do\n offset = Regexp.last_match.offset(1)\n matches << loc.expression.adjust(begin_pos: offset.first)\n .with(end_pos: loc.expression.begin_pos + offset.last)\n end\n\n matches\n end\n end",
"def values\n @array.transpose[1]\n end",
"def make_array\n [@name, @author, @deviation_url, @views, @favs, @comments]\n end",
"def proptextarray(name) #:nodoc:\n @properties.select{ |f| f.name? name }.map{ |p| p.to_text }\n end",
"def to_a()\n array = @basenameList.map{|basename|\n @analyzerTable[basename] ;\n }\n return array ;\n end",
"def label_ids\n self[:value] ||= {}\n self[:value][:label_ids] ||= []\n @label_ids ||= multi_value? ? self[:value][:label_ids].collect{|l_id| l_id.to_i if l_id.to_i > 0 }.compact : []\n end",
"def annotate\n genes={}\n File.open(ANNOTATION,'r').each do |line|\n temp=line.split\n genes[temp[9]]={}\n genes[temp[9]][\"start\"]=temp[3].to_i\n genes[temp[9]][\"end\"]=temp[4].to_i\n genes[temp[9]][\"strand\"]=temp[6]\n genes[temp[9]][\"length\"]=temp[4].to_i - 1 - temp[3].to_i\n end\n return genes\nend",
"def genOutputLabelArray()\n return [\"timeIndex\",\n \"posIndex_x\", \"posIndex_y\", \"posIndex_z\",\n \"population\"] ;\n end",
"def values\n attribute_nodes.map(&:value)\n end",
"def semantic_values\n [semantic_value]\n end",
"def tags\n @values.keys.map {|tag| Values.tag_map[tag]}\n end",
"def [] field\n f = self.get(field)\n return nil if (f == nil)\n return f.values.to_a\n end",
"def indexes\n indexes = []\n @attributes.each_with_index {|values,idx| indexes << idx if values}\n indexes\n end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values\n to_enum(:each_value).to_a\n end",
"def to_array\n [@name, @city]\n end",
"def values_at( *attributes )\n\t\treturn attributes.collect do |attribute|\n\t\t\tself[ attribute ]\n\t\tend\n\tend",
"def cell_native_array(row_key, column_family, column_qualifier, value=nil, timestamp=nil)\n [\n row_key.to_s,\n column_family.to_s,\n column_qualifier.to_s,\n value.to_s\n ]\n end",
"def to_ary # :nodoc:\n attributes.to_a\n end",
"def values\n vals = []\n each{|k,v| vals << v}\n vals\n end",
"def labels\n multi_value? ? label_ids.collect{|l_id| Label.find(l_id) } : []\n end",
"def gather_annotations(streams)\n raw_annotations = streams.keys.inject(Array.new) do |a, s|\n if s.type.name == \"/logger/Annotations\"\n a.concat(s.samples.enum_for(:raw_each).to_a)\n end\n a\n end\n raw_annotations.map(&:last).sort_by { |s| s.raw_time.microseconds }\n end",
"def values(*) end",
"def names_array(movies)\n names_array = movies.collect {|name| name[:stars]}.flatten.uniq\n return names_array\nend",
"def parse_array(array)\n {}.tap do |ret|\n array.each_slice(2) do |(term, attr_array)|\n ret[term] = attr_array.each_slice(2).each_with_object({}) do |(key, val), hash|\n case key\n when 'tf'\n hash[:tf] = Integer(val)\n when 'positions'\n hash[:positions] = parse_position_list(val)\n when 'df'\n hash[:df] = Float(val)\n end\n # when 'offsets'\n # hash[:offsets] = parse_offset_list(val)\n # when 'tf-idf'\n # hash[:tfidf] = Float(val)\n end\n end\n end\n end",
"def values\n @fields.map { |field_name, field| field.values.map(&:to_s) }.transpose\n end",
"def parse_attribute_values\n values = []\n each_property do |name, value|\n values << AttributeValue.new(name, value)\n end\n\n DEFAULTS.each do |default|\n if values.find { |v| v.attribute.name == default.attribute.name }.nil?\n values << default\n end\n end\n\n values\n end",
"def to_a\n [value, timestamp]\n end",
"def values\n list = []\n each_value{|value| list << value}\n list\n end",
"def extract\n package_annotations = []\n\n @net.transitions.each do |transition|\n if line = extract_annotation(transition)\n package_annotations << line\n end\n end\n\n package_annotations << \".@ PackageName :: \\\"%s\\\"\" % @package_name if @package_name\n package_annotations << \".@ Editor :: \\\"%s\\\"\" % @editor if @editor\n package_annotations << \".@ Tag :: \\\"%s\\\"\" % @tag if @tag\n\n return package_annotations\n end",
"def values\n @values ||= []\n end",
"def to_a\n\t\tarr = []\n\t\tresults.each do |row|\n\t\t\tarr.push([row[\"user_id\"],row[\"movie_id\"],row[\"rating\"],row[\"predicted\"]])\n\t\tend\n\t\treturn arr\n\tend",
"def to_a\n array = []\n attributes.each do |value|\n array << deep_unwrap(value)\n end\n\n return array\n end",
"def to_a\n array = []\n attributes.each do |value|\n array << deep_unwrap(value)\n end\n\n return array\n end",
"def convert_to_array(attr)\n attr = attr.split(',')\n return attr\nend",
"def values() end",
"def get_array_final(name,value)\n\thash_final = Hash[name.zip(value.map)]\n\treturn hash_final\nend",
"def summarize_annotations\n\t\tresult = {is_expressed: 0, is_not_expressed: 0, not_sure_expressed: 0, is_bad_pattern: 0, total_anns: self.annotations.count}\n\t\tself.annotations.each do |ann|\n\t\t\tcase ann.annotation\n\t\t\twhen 0\n\t\t\t\tresult[:is_not_expressed] += 1\n\t\t\twhen 1\n\t\t\t\tresult[:is_expressed] += 1\n\t\t\twhen 2\n\t\t\t\tresult[:not_sure_expressed] += 1\n\t\t\tend\n\n\t\t\tif(!ann.is_bad_pattern.nil? && ann.is_bad_pattern)\n\t\t\t\tresult[:is_bad_pattern] += 1\n\t\t\tend\n\t\tend\n\t\tresult\n\tend",
"def get_contact_info_array\n rtn = []\n rtn << get_attr(:name)\n rtn << [:provided_by, self.provider.name] # Special case, as the symbol doesn't match the attribute\n rtn << get_attr(:phone)\n rtn << get_attr(:email)\n rtn << get_attr(:url)\n end"
] | [
"0.67670727",
"0.6676099",
"0.6590774",
"0.65003717",
"0.6200452",
"0.61243695",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60560536",
"0.60516894",
"0.5891641",
"0.58614075",
"0.58585715",
"0.5786706",
"0.56332755",
"0.5614939",
"0.56140226",
"0.5555911",
"0.55462515",
"0.5532151",
"0.551807",
"0.5514759",
"0.5486424",
"0.5470941",
"0.54444385",
"0.5439569",
"0.54383546",
"0.542933",
"0.5421025",
"0.5421025",
"0.53967845",
"0.53494835",
"0.5342466",
"0.53351134",
"0.5331682",
"0.5319645",
"0.5318491",
"0.53172594",
"0.5300225",
"0.5298278",
"0.5297461",
"0.52961284",
"0.5286694",
"0.52791363",
"0.52752477",
"0.5267659",
"0.52667296",
"0.5265506",
"0.5248612",
"0.5234582",
"0.5232702",
"0.5225362",
"0.52219415",
"0.5218448",
"0.5217501",
"0.52144027",
"0.521417",
"0.5213249",
"0.52088624",
"0.5206577",
"0.5183358",
"0.5177218",
"0.5174274",
"0.5169917",
"0.5169917",
"0.5169917",
"0.5169917",
"0.5169917",
"0.5169917",
"0.5169917",
"0.51661456",
"0.51638925",
"0.51594466",
"0.5152969",
"0.51528955",
"0.51517105",
"0.5144334",
"0.5140386",
"0.5132883",
"0.5132741",
"0.51319337",
"0.51318395",
"0.5130096",
"0.5122459",
"0.5109035",
"0.510522",
"0.51032877",
"0.50945324",
"0.5090689",
"0.5090689",
"0.5088466",
"0.5083919",
"0.5080651",
"0.5080628",
"0.5079416"
] | 0.62658453 | 4 |
Index all pages except pages matching any value in config[:indextank][:excludes] The main content from each page is extracted and indexed at indextank.com The doc_id of each indextank document will be the absolute url to the resource without domain name | def run(content, params={})
markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML, autolink: true, space_after_headers: true)
page_text = extract_text(markdown.render(item.raw_content))
title = item[:title] || item.identifier
file_name = item.identifier.to_s.gsub(/\//,'_')
puts "Indexing page: #{@item.identifier} to #{@tmp_index}/#{file_name}.idx"
unless Dir.exists?(@tmp_index)
Dir.mkdir(@tmp_index)
end
idx_file_name = "#{@tmp_index}/#{file_name}.idx"
if File.exists?(idx_file_name)
File.delete(idx_file_name)
end
File.open(idx_file_name,"w+") do |file|
file.write({title: title, text: page_text, tags: "api", loc: @item.path }.to_json)
end
content
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index_pages\n debug_msg \" generating pages search index\"\n\n pages = @files.select do |file|\n file.text?\n end\n\n pages.each do |page|\n debug_msg \" #{page.page_name}\"\n record = page.search_record\n @index[:searchIndex] << search_string(record.shift)\n @index[:longSearchIndex] << ''\n record.shift\n @index[:info] << record\n end\n end",
"def index\n get_own_documents\n if @page > @pages_amount && @pages_amount != 0\n @page = @pages_amount\n get_own_documents\n end\n render_js_or_html_index\n end",
"def generate_indexes\n @first_page = first_page\n generate_index('index')\n generate_index('indexes')\n end",
"def generate(site)\n puts 'Indexing pages...'\n \n # gather pages and posts\n items = site.pages.dup.concat(site.posts)\n\n # only process files that will be converted to .html and only non excluded files \n items = items.find_all {|i| i.output_ext == '.html' && ! @excludes.any? {|s| (i.absolute_url =~ Regexp.new(s)) != nil } } \n items.reject! {|i| i.data['exclude_from_search'] } \n \n # only process items that are changed since last regeneration\n items = items.find_all {|i| @last_indexed.nil? || File.mtime(i.full_path_to_source) > @last_indexed }\n\n # dont process index pages\n items.reject! {|i| i.is_a?(Jekyll::Page) && i.index? }\n\t\t\t \n while not @index.running?\n # wait for the indextank index to get ready\n sleep 0.5\n end\n \n items.each do |item| \n page_text = extract_text(site,item)\n\n @index.document(item.absolute_url).add({ \n :text => page_text,\n :title => item.data['title'] || item.name \n })\n puts 'Indexed ' << item.absolute_url\n end\n \n @last_indexed = Time.now\n write_last_indexed()\n \n puts 'Indexing done'\n end",
"def generate_indexes\n @posts = DB[:posts].all\n index_layout = File.read \"#{root}/_layouts/index.html\"\n @output = Liquid::Template.parse(index_layout).render('site'=>self)\n path = \"#{root}/_site/index.html\"\n File.open(path, 'w'){ |f| f.write(@output) }\n end",
"def index\r\n build_index unless @index\r\n @index\r\n end",
"def index\n @companies = Company.rank.where(\"tweet_count > 0\").page(params[:page]).per(30)\n @main_indices = MainIndex.all\n end",
"def process_index\n bindings = {\n :url => @definition.get_url,\n :name => @definition.get_name,\n :resources => @definition.resources,\n :description => @definition.get_description,\n :version => @definition.get_version\n }\n\n page = Calamum::DocGenerator.new(:index)\n page.save_template('index.html', bindings)\n end",
"def index\n set_current_query(@study)\n\n @entities = @study.sites.descend_by_created_at.search(get_current_search(Site)).paginate(:page => params[:page])\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @entities }\n format.js {\n if params[@template.dom_id_nested(:commit, @study)].blank? && params[:page].blank?\n render :file => 'admin/study_sites/index.html.erb'\n else\n render(:update) {|page| page[dom_id(@study, :sites_index)].replace :file => \"admin/study_sites/index\" }\n end\n }\n end\n end",
"def index_disabled\n @index_disabled ||= false\n end",
"def index\n render file: 'public/404.html',\n status: :not_found,\n layout: false unless current_user\n @transcripts = Transcript.order_by(:date.desc).page(params[:page])\n end",
"def gen_main_index\n template = RDoc::TemplatePage.new @template::INDEX\n\n open 'index.html', 'w' do |f|\n classes = @classes.sort.map { |klass| klass.value_hash }\n\n values = {\n 'main_page' => @main_page,\n 'initial_page' => main_url,\n 'style_url' => style_url('', @options.css),\n 'title' => CGI.escapeHTML(@options.title),\n 'charset' => @options.charset,\n 'classes' => classes,\n }\n\n values['inline_source'] = @options.inline_source\n\n template.write_html_on f, values\n end\n end",
"def index_documents\n @params = {}\n @action = 'index_documents'\n \n send_auth_request\n end",
"def index\n authorize IndexPage\n @index_pages = IndexPage.\n where(institution: current_institution).\n order(:name)\n end",
"def all_indexes\n Chewy.eager_load!\n Chewy::Index.descendants - [Chewy::Stash::Journal, Chewy::Stash::Specification]\n end",
"def index\n get_own_lessons\n if @page > @pages_amount && @pages_amount != 0\n @page = @pages_amount\n get_own_lessons\n end\n render_js_or_html_index\n end",
"def main_pages\n @mainpages = Webpage.find(:all, :conditions => ['page_alias NOT IN (?) AND is_root != ? AND enabled = ?', ['about', 'contact'], true, true]);\n end",
"def generate_index\n page = {}\n page[:title] = nil\n page[:body] = \"\"\n \n @posts.keys.sort.reverse.each_with_index do |date, i|\n if i >= @config[:front_page_entries]\n break\n else\n post = @posts[date]\n \n # Don't post the whole thing on the front page.\n unless post[:excerpt].nil?\n post[:body] = ''\n end\n\n page[:body] << generate_post(@posts[date])\n end\n end\n\n File.open(File.join(@site_path, 'index.html'), 'w') do |f|\n f << generate_page(page)\n end\n end",
"def index\n @ignores = Analytics::Ignore.all\n\n respond_to do |format|\n format.html # index.html.erb\n end\n end",
"def index\n semantic_breadcrumb @project.name\n if @project.manager?(current_user)\n @documents = @project.documents\n else\n ids = @project.assigns.where('user_id = ?', current_user.id).all.map{|d| d.document_id}\n @documents = Document.where('id in (?)', ids)\n end\n @documents = @documents.where(\"did = ?\", params[:did]) if params[:did].present?\n @documents = @documents.where(\"done = ?\", params[:done] == \"true\") if params[:done].present?\n @documents = @documents.where(\"curatable = ?\", params[:curatable] == \"true\") if params[:curatable].present?\n @documents.includes(:assigns)\n if params[:term].present?\n @documents = @documents.where(\"did like ? or title like ? or xml like ?\", \"%#{params[:term]}%\", \"%#{params[:term]}%\", \"%#{params[:term]}%\")\n end\n\n if sort_column == \"default\"\n @documents = @documents.order(\"batch_id DESC, batch_no ASC, id DESC\")\n else \n @documents = @documents.order(sort_column + \" \" + sort_direction)\n end\n @documents = @documents.order(\"batch_id DESC, batch_no ASC, id DESC\")\n unless request.format.json?\n # @documents = @documents.order(\"batch_id DESC, batch_no ASC, id DESC\").page(params[:page])\n @documents = @documents.page(params[:page])\n end \n end",
"def delete_tank_indexes\n tanker_config.index.delete_document(it_doc_id)\n end",
"def get_nondir_pages( directory )\n Dir[ File.join( directory, \"*/index.src\" ) ]\n end",
"def build_index\n reset @store.all_files.sort, @store.all_classes_and_modules.sort\n\n index_classes\n index_methods\n index_pages\n\n { :index => @index }\n end",
"def index \n @transparentnavbar = true\n @index_body = true\n \n if params[:search]\n # @search = Sunspot.search Newsroom do\n # fulltext params[:search]\n #ends\n @press_releases = PressRelease.where(exclusive: false).where(\"embargo <= ?\", Date.today).search(params[:search])\n else \n @press_releases = PressRelease.includes(:uploads).all.order(\"press_releases.embargo DESC\").where(exclusive: false).where(\"embargo <= ?\", Date.today).where.not(uploads: { file_file_name: nil }).where.not(title: nil).paginate(:page => params[:page], :per_page => 4)\n end\n \n end",
"def index_all\n puts \"Indexing everything\"\n MojoDNA::Searchable::RemoteSearchable::searcher.push( [:index_all, self.inspect.to_s, nil] )\n end",
"def index_finalized\n index\n end",
"def scan_pages0( index, directory )\n unless index\n index = Page.new( File.join( directory, \"index.src\" ) )\n @root = index \n end\n \n @subdir_pages = get_nondir_pages( directory )\n @nondir_pages = get_subdir_pages( directory )\n \n @nondir_pages.each do |page_src|\n Page.new( page_src, index )\n end\n @subdir_pages.each do |page_src|\n page = Page.new( page_src, index )\n scan_pages( config, page, File.dirname( page_src ) )\n end\n end",
"def generate(site)\n Jekyll.logger.info \"Lunr:\", 'Creating search index...'\n \n @site = site\n # gather pages and posts\n data = pages_to_index(site)\n items = data[:items]\n index = []\n \n index_js = open(@lunr_path).read\n index_js << 'var idx = lunr(function() {this.pipeline.remove(lunr.stemmer);this.searchPipeline.remove(lunr.stemmer);this.pipeline.remove(lunr.stopWordFilter);this.searchPipeline.remove(lunr.stopWordFilter);this.tokenizer.separator = /[\\s,.;:/?!()]+/;'\n @lunr_config['fields'].each_pair do |name, boost|\n index_js << \"this.field('#{name}', {'boost': #{boost}});\"\n end\n items.each_with_index do |item_data, i|\n doc = {}\n flat_data = {}\n item = item_data.to_liquid\n if item['recordstatus'] != 'inactive' or ENV['JEKYLL_ENV'] != 'production'\n @config[\"fields\"].each do |field|\n field[\"jekyllfields\"].each do |jekyllfield|\n widget = field['widget']\n orig_field = item[jekyllfield]\n if widget\n if widget == 'flatten' && orig_field\n orig_field = orig_field.values.flatten()\n end\n if widget == 'relational'\n if field['secondaryfield']\n orig_field = site.collections[field['collection']].docs.collect {|collection| collection[jekyllfield] if collection.to_liquid[field['matchfield']] and collection.to_liquid[field['matchfield']].map{ |i| i[field['secondaryfield']] }.include? item['slug'] }\n else\n orig_field = site.collections[field['collection']].docs.collect {|collection| collection[jekyllfield] if collection.to_liquid[field['matchfield']] and collection.to_liquid[field['matchfield']].include? item['slug'] }\n end\n end\n if widget == 'nested'\n if item[field[\"parentfield\"]]\n if item[field[\"parentfield\"]].class == Array\n orig_field = item[field[\"parentfield\"]].map {| parent | parent[jekyllfield]}\n else\n orig_field = item[field[\"parentfield\"]][jekyllfield]\n end\n end\n end\n if orig_field\n orig_field = orig_field.compact.uniq.flatten()\n orig_field = [].concat(orig_field)\n end\n flat_data[field[\"searchfield\"]] = flat_data[field[\"searchfield\"]] ? flat_data[field[\"searchfield\"]].concat(orig_field) : orig_field\n end\n format_field = orig_field.class == Array ? orig_field.compact.uniq.join(\" \") : orig_field\n if format_field != nil\n if doc[field[\"searchfield\"]] == nil\n doc[field[\"searchfield\"]] = format_field.strip()\n else\n doc[field[\"searchfield\"]] += \" \" + format_field.strip()\n end\n end\n end\n end\n doc['id'] = item['slug']\n index_js << 'this.add(' << ::JSON.generate(doc, quirks_mode: true) << ');'\n final_dict = item.to_hash\n final_dict['content'] = Nokogiri::HTML(Kramdown::Document.new(item.content).to_html).text.tr(\"\\n\",\" \")\n @docs[item[\"slug\"]] = final_dict.merge(flat_data)\n Jekyll.logger.debug \"Lunr:\", (item['title'] ? 
\"#{item['title']} (#{item['url']})\" : item['url'])\n end\n end\n index_js << '});'\n FileUtils.mkdir_p(File.join(site.dest, @js_dir))\n FileUtils.mkdir_p(File.join(site.dest, @css_dir))\n filename = File.join(@js_dir, 'index.js')\n \n ctx = ExecJS.compile(index_js)\n \n index = ctx.eval('JSON.stringify(idx)')\n total = \"var docs = #{@docs.to_json}\\nvar index = #{index.to_json}\\nvar baseurl = #{@jekyllconfig['baseurl'].to_json}\\nvar lunr_settings = #{@config.to_json}\"\n filepath = File.join(site.dest, filename)\n File.open(filepath, \"w\") { |f| f.write(total) }\n Jekyll.logger.info \"Lunr:\", \"Index ready (lunr.js v#{@lunr_version})\"\n added_files = [filename]\n \n site_js = File.join(site.dest, @js_dir)\n site_css = File.join(site.dest, @css_dir)\n \n # If we're using the gem, add the lunr and search JS files to the _site\n if File.expand_path(site_js) != File.dirname(@lunr_path)\n extras = Dir.glob(File.join(File.dirname(@lunr_path), \"*.js\"))\n if extras.length > 0\n FileUtils.cp(extras, site_js)\n extras.map! { |min| File.join(@js_dir, File.basename(min)) }\n Jekyll.logger.debug \"Lunr:\", \"Added JavaScript to #{@js_dir}\"\n added_files.push(*extras)\n end\n extrascss = Dir.glob(File.join(File.dirname(@lunr_path), \"*.css\"))\n if extrascss.length > 0\n FileUtils.cp(extrascss, site_css)\n extrascss.map! { |min| File.join(@css_dir, File.basename(min)) }\n Jekyll.logger.debug \"Lunr:\", \"Added CSS to #{@css_dir}\"\n added_files.push(*extrascss)\n end\n end\n \n # Keep the written files from being cleaned by Jekyll\n added_files.each do |filename|\n site.static_files << SearchIndexFile.new(site, site.dest, \"/\", filename)\n end\n end",
"def index\n return unless auth_gdocs\n @folders = get_gfolders()\n return if !@folders\n render 'document/index'\n end",
"def index\n @documents = Document.query( :string => params[:string], :site => params[:site_id] )\n @site = Site.first(conditions: {slug: params[:site_id]}) if params[:site_id]\n @documents = @documents.page(params[:page])\n respond_with @documents\n end",
"def index_file(file, pages_dir, stopwords, file_data)\n # Removing the dir from the file name\n # begin\n actual_name = file.gsub(pages_dir, \"\")\n # rescue NoMethodError\n# actual_name = badpage.html\n \n\n # Resetting the file path\n file_path = \"\"\n file_path = File.expand_path(\".\") + \"/\" + file\n\n print \"Parsing HTML document: \" + actual_name + \" \\n\"\n\n # Finding all the tokens in the file\n tokens = find_tokens(file_path)\n\n # Getting the page title, word count, and page url\n page_title = get_title(file_path)\n word_count = tokens.length\n page_url = file_data[actual_name]\n\n # Updating the docindex hash\n $docindex[file.gsub(pages_dir, \"\")] = [word_count, page_title, page_url]\n\n # Removing the stop words and getting the stem words in the file\n tokens = remove_stop_tokens(tokens, stopwords)\n tokens = stem_tokens(tokens)\n\n # Creating the invindex hash table\n for token in tokens\n begin\n if $invindex.member?(token)\n if $invindex[token].member?(actual_name)\n $invindex[token][actual_name] += 1\n else\n $invindex[token][actual_name] = 1\n end\n else\n $invindex[token] = {actual_name => 1}\n end\n # end\n# rescue NoMethodError\n # puts \"NoMethodError\"\n end\n #puts file_name\n # title = nil\n end\n #end\nend",
"def index\n\n # if we’re searching\n if params.key?( :search ) and action_allowed?( :search )\n\n # we’re searching: attempt to do a custom search\n @content = add_pagination_and_sorting( custom_search )\n\n else\n\n # otherwise, grab all the things\n @content = add_pagination_and_sorting( custom_index )\n\n end\n\n generate_titles\n\n end",
"def generate_all_teachers_index(site)\n # puts \"55555555555555555555555555555555555555555555555555555555\"\n allTeachers = AllTeachersIndex.new(site, site.source, \"/pages/teachers\")\n allTeachers.render(site.layouts, site.site_payload)\n allTeachers.write(site.dest)\n\n site.pages << allTeachers\n site.static_files << allTeachers\n end",
"def update_tank_indexes\n if tanker_indexable?\n tanker_config.index.add_document(\n it_doc_id, tanker_index_data, tanker_index_options\n )\n else\n delete_tank_indexes\n end\n end",
"def gen_main_index\n if @template.const_defined? :FRAMELESS then\n #\n # If we're using a template without frames, then just redirect\n # to it from index.html.\n #\n # One alternative to this, expanding the main page's template into\n # index.html, is tricky because the relative URLs will be different\n # (since index.html is located in at the site's root,\n # rather than within a files or a classes subdirectory).\n #\n open 'index.html', 'w' do |f|\n f.puts(%{<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"\n \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">})\n f.puts(%{<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\"\n lang=\"en\">})\n f.puts(%{<head>})\n f.puts(%{<title>#{CGI.escapeHTML(@options.title)}</title>})\n f.puts(%{<meta http-equiv=\"refresh\" content=\"0; url=#{@main_url}\" />})\n f.puts(%{</head>})\n f.puts(%{<body></body>})\n f.puts(%{</html>})\n end\n else\n main = RDoc::TemplatePage.new @template::INDEX\n\n open 'index.html', 'w' do |f|\n style_url = style_url '', @options.css\n\n classes = @classes.sort.map { |klass| klass.value_hash }\n\n values = {\n :initial_page => @main_url,\n :style_url => style_url('', @options.css),\n :title => CGI.escapeHTML(@options.title),\n :charset => @options.charset,\n :classes => classes,\n }\n\n values[:inline_source] = @options.inline_source\n\n main.write_html_on f, values\n end\n end\n end",
"def index\n @documents = Document.not_templates.publicly_available\n render layout: \"layouts/public\"\n end",
"def index\n if params[:q].present?\n @results = Page.search(query: { fuzzy: { content: { value: params[:q] } } }, highlight: { fields: { content: {} } })\n @has_highlights = true\n else\n @results = Page.search('*')\n end\n @pages = @results.records\n end",
"def index\n @index ||= Crawler::Index.new(base_uri)\n end",
"def show\n @companies = @sub_index.companies.where(\"tweet_count > 0\").page(params[:page]).per(30)\n @main_indices = MainIndex.all\n render :template => \"companies/index\"\n end",
"def index\n @specific_contents = SpecificContent.all\n end",
"def purge_unused_page_index(index)\n purge_page_index(index)\n end",
"def index_site(\n url, insert_externals: true, allow_paths: nil, disallow_paths: nil\n )\n crawl_opts = { allow_paths: allow_paths, disallow_paths: disallow_paths }\n total_pages_indexed = 0\n\n ext_urls = @crawler.crawl_site(url, crawl_opts) do |doc|\n result = true\n result = yield(doc) if block_given?\n\n if result && !doc.empty? && write_doc_to_db(doc)\n total_pages_indexed += 1\n Wgit.logger.info(\"Crawled and saved internal page: #{doc.url}\")\n end\n end\n\n @db.url?(url) ? @db.update(url) : @db.insert(url)\n\n if insert_externals && ext_urls\n num_inserted_urls = write_urls_to_db(ext_urls)\n Wgit.logger.info(\"Found and saved #{num_inserted_urls} external url(s)\")\n end\n\n Wgit.logger.info(\"Crawled and saved #{total_pages_indexed} docs for the \\\nsite: #{url}\")\n\n total_pages_indexed\n end",
"def _index_document(opts = {})\n index_document(opts)\n end",
"def index\n # @process_lts = ProcessLt.where([get_query_string])\n index_admin\n end",
"def index_all\n @id = params[:device_id]\n @setting = Device.find_by(device_id: params[:device_id])\n @date = params[:date] || \"\"\n @page = (params[:page] || \"1\").to_i\n pagesize = 24\n @colsize = 2 # col-sm-#{@colsize}, the size for bootstrap column.\n\n if params[:head] && params[:tail]\n head = params[:head].to_i\n tail = params[:tail].to_i\n end\n\n skipped = 0\n @total = 0\n @files = []\n @n_pages = 0\n\n dir = BASEDIR+\"/#{@id}\"\n return unless File.directory? dir\n Dir.entries(dir).sort.reverse.each do |f|\n next if f.start_with? \".\"\n next unless f.end_with? \".jpg\"\n next unless f.start_with? @date\n if head.is_a? Fixnum\n m = f.sub(\"_\",\"\").match(/([0-9]{12}).jpg/)\n seq = m[1].to_i\n raise \"#{f}\" if seq.nil?\n break if seq < tail\n next if head < seq\n end\n @total += 1\n if skipped < (@page - 1) * pagesize\n skipped += 1\n next\n end\n if @files.length < pagesize\n @files += [f]\n end\n end\n @n_pages = @total / pagesize + (@total % pagesize == 0 ? 0 : 1)\n\n @dates = load_index(dir)\n\n @n_watchers = get_access_log\n end",
"def index\n @blacklisted_site_search = params[:blacklisted_site_search]\n conditions = [\"url like ?\", \"%#{@blacklisted_site_search}%\"] unless @blacklisted_site_search.nil?\n @blacklisted_sites = BlacklistedSite.paginate(:all, :conditions => conditions, :order => \"url ASC\", :page => params[:page])\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @blacklisted_sites }\n end\n end",
"def index\n @bookmarks = token_or_current_or_guest_user.bookmarks\n bookmark_ids = @bookmarks.collect { |b| b.document_id.to_s }\n\n if bookmark_ids.empty?\n @response = Blacklight::Solr::Response.new({}, {})\n @document_list = []\n else\n query_params = {\n q: bookmarks_query(bookmark_ids),\n defType: 'lucene',\n rows: bookmark_ids.count\n }\n # search_service.fetch does this internally (7.25)\n @response = search_service.repository.search(query_params)\n @document_list = ActiveSupport::Deprecation::DeprecatedObjectProxy.new(@response.documents, 'The @document_list instance variable is now deprecated and will be removed in Blacklight 8.0')\n end\n\n respond_to do |format|\n format.html {}\n format.rss { render layout: false }\n format.atom { render layout: false }\n\n additional_response_formats(format)\n document_export_formats(format)\n end\n end",
"def index\n set_current_query(@study)\n page = params[@template.dom_id_nested(:page, @study)]\n @entities = @study.contacts.descend_by_id.search(get_current_search(User)).paginate(:page => page)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @entities }\n format.js {\n if params[@template.dom_id_nested(:commit, @study)].blank? && page.blank?\n render :file => 'admin/study_contacts/index.html.erb'\n else\n render(:update) {|page| page[dom_id(@study, :contacts_index)].replace :file => \"admin/study_contacts/index\" }\n end\n }\n end\n end",
"def index_disabled= bool\n @index_disabled = bool\n end",
"def index\n @documents = Document.where(\"deleted = ?\", false).paginate(:page => params[:page], :per_page => 10).order(created_at: :desc)\n\n end",
"def exclude_links(*array)\n @excluded_link_indexes = array\n end",
"def index\n @@semaphore.synchronize {\n inner_index()\n }\n end",
"def index\n get_own_media_elements\n if @page > @pages_amount && @pages_amount != 0\n @page = @pages_amount\n get_own_media_elements\n end\n render_js_or_html_index\n end",
"def index\n return @content.index\n end",
"def index\n if params[:search].present?\n\n search = params[:search].present? ? params[:search] : \"*\"\n where = {}\n\n # if params[:date_filter].present?\n # where[:date] = {\n # gte: DateTime.strptime(params[:date_filter], '%m/%d/%Y %l:%M %p').beginning_of_day,\n # lte: DateTime.strptime(params[:date_filter], '%m/%d/%Y %l:%M %p').end_of_day\n # }\n # end\n\n @for_bidding = Project.search( search, where: where.merge(status: 1), order: {created_at: :desc}, misspellings: false )\n @ongoing = Project.search( search, where: where.merge(status: 2), order: {created_at: :desc}, misspellings: false )\n @completed = Project.search( search, where: where.merge(status: 3), order: {created_at: :desc}, misspellings: false )\n @funding = Project.search( search, where: where.merge(status: 4), order: {created_at: :desc}, misspellings: false )\n\n # @ingoing = Document.search( search, where: where.merge(:outgoing => false), order: {created_at: :desc}, misspellings: false )\n # @outgoing = Document.search( search, where: where.merge(:outgoing => true), order: {created_at: :desc}, misspellings: false )\n\n else\n @projects = Project.all\n @for_bidding = @projects.where(status: 1).order('created_at desc')\n @ongoing = @projects.where(status: 2).order('created_at desc')\n @completed = @projects.where(status: 3).order('created_at desc')\n @funding = @projects.where(status: 4).order('created_at desc')\n end\n\n respond_to do |format|\n format.html\n format.xlsx {\n render xlsx: \"index\", filename: \"documents_spreadsheet.xlsx\"\n }\n end\n end",
"def index\n @scraping_pages = ScrapingPage.all\n end",
"def prune_index\n# missing_files=index_time_dbm_file.reject{|filename,itime| File.exists?(filename) && Picolena::IndexedDirectories.any?{|dir,alias_path| filename.starts_with?(dir)}}\n# missing_files.each{|filename, itime|\n# index.writer.delete(:complete_path, filename)\n# index_time_dbm_file.delete(filename)\n# logger.debug \"Removed : #{filename}\"\n# }\n# index.optimize\n end",
"def index\n page = session[:last_documents_page] = params[:page] || session[:last_documents_page] || '1'\n @documents_per_page = params[:documents_per_page] || cookies[:documents_per_page] || Elph[:items_per_page].to_s\n cookies[:documents_per_page] = { :value => @documents_per_page, :expires => 1.year.from_now }\n @documents = Document.own_documents.non_aliases.page(page).per(@documents_per_page).order('date desc, id desc')\n \n #fire_log self.public_methods.sort, 'publ_m'\n fire_log controller_name, 'controller_name'\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @documents }\n end\n end",
"def index\n @root_pages = [fetch_root_page].flatten\n end",
"def add_to_index?(url)\n print "\\n- indexing #{url}"\n t0 = Time.now\n page = Page.find(scrub(url))\n \n # if the page is not in the index, then index it\n if page.new_record? then\n index(url) { |doc_words, title|\n dsize = doc_words.size.to_f\n puts " [new] - (#{dsize.to_i} words)"\n doc_words.each_with_index { |w, l|\n printf("\\r\\e - %6.2f%",(l*100/dsize))\n loc = Location.new(:position => l)\n loc.word, loc.page, page.title = Word.find(w), page, title\n loc.save\n }\n }\n \n # if it is but it is not fresh, then update it\n elsif not page.fresh? then\n index(url) { |doc_words, title|\n dsize = doc_words.size.to_f\n puts " [refreshed] - (#{dsize.to_i} words)"\n page.locations.destroy!\n doc_words.each_with_index { |w, l|\n printf("\\r\\e - %6.2f%",(l*100/dsize))\n loc = Location.new(:position => l)\n loc.word, loc.page, page.title = Word.find(w), page, title\n loc.save\n }\n }\n page.refresh\n \n #otherwise just ignore it\n else\n puts " - (x) already indexed"\n return false\n end\n t1 = Time.now\n puts " [%6.2f sec]" % (t1 - t0)\n return true\n end\n \n # scrub the given link\n def scrub(link, host=nil)\n unless link.nil? then\n return nil if DO_NOT_CRAWL_TYPES.include? link[(link.size-4)..link.size] or link.include? '?' or link.include? '/cgi-bin/' or link.include? '&' or link[0..8] == 'javascript' or link[0..5] == 'mailto'\n link = link.index('#') == 0 ? '' : link[0..link.index('#')-1] if link.include? '#'\n if link[0..3] == 'http'\n url = URI.join(URI.escape(link))\n else\n url = URI.join(host, URI.escape(link))\n end\n return url.normalize.to_s\n end\n end\n \n # do the common indexing work\n def index(url)\n open(url, "User-Agent" => USER_AGENT){ |doc|\n h = Hpricot(doc)\n title, body = h.search('title').text.strip, h.search('body')\n %w(style noscript script form img).each { |tag| body.search(tag).remove}\n array = []\n body.first.traverse_element {|element| array << element.to_s.strip.gsub(/[^a-zA-Z ]/, '') if element.text? }\n array.delete("")\n yield(array.join(" ").words, title)\n }\n end\nend\n \n$stdout.sync = true\nspider = Spider.new\nspider.start\n",
"def index\n ## BUILD MASTER HASH WITH ALL SUB-PARAMS ##\n allparams = {}\n allparams[:mainmodel] = StorageController\n allparams[:webparams] = params\n results = Search.new(allparams).search\n\n flash[:error] = results[:errors].join('<br />') unless results[:errors].empty?\n includes = results[:includes]\n results[:requested_includes].each_pair{|k,v| includes[k] = v}\n @objects = results[:search_results]\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @objects.to_xml(:include => convert_includes(includes), :dasherize => false) }\n end\n end",
"def index ; @index ; end",
"def index\n\n\t\t# GET ALL THE BLOG POSTS\n @posts = Post.find(:all, :limit => 5)\n @downloads = Download.find(:all, :limit => 5, :conditions => {:parent_id => nil})\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @posts }\n end\n end",
"def index\n ## BUILD MASTER HASH WITH ALL SUB-PARAMS ##\n allparams = {}\n allparams[:mainmodel] = NodeRack\n allparams[:webparams] = params\n results = Search.new(allparams).search\n\n flash[:error] = results[:errors].join('<br />') unless results[:errors].empty?\n includes = results[:includes]\n results[:requested_includes].each_pair{|k,v| includes[k] = v}\n @objects = results[:search_results]\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @objects.to_xml(:include => convert_includes(includes),\n :dasherize => false) }\n end\n end",
"def do_index\n @citation = Footnote.new\n @citations = @chapter.citations.includes(:biblioentry).order('biblioentries.name')\n @noted = @chapter\n @biblioentries = @citations.collect { |x| x.biblioentry }.sort.uniq\n all_citations = Footnote.includes(:biblioentry).where(slug: '').order('biblioentries.name')\n @all_citations = []\n all_citations.each do |citation|\n unless @all_citations.include?(citation.biblioentry) || @biblioentries.include?(citation.biblioentry)\n @all_citations << citation.biblioentry\n end\n end\n end",
"def index\n @results = {}\n\n if TeSS::Config.solr_enabled\n SEARCH_MODELS.each do |model_name|\n model = model_name.constantize\n @results[model_name.underscore.pluralize.to_sym] = Sunspot.search(model) do\n fulltext search_params\n\n with('end').greater_than(Time.zone.now) if model_name == 'Event'\n\n\n # Hide failing records\n if model.method_defined?(:link_monitor)\n unless current_user && current_user.is_admin?\n without(:failing, true)\n end\n end\n\n if model.attribute_method?(:user_requires_approval?)\n # TODO: Fix this duplication!\n # Hide shadowbanned users' events, except from other shadowbanned users and administrators\n unless current_user && (current_user.shadowbanned? || current_user.is_admin?)\n without(:shadowbanned, true)\n end\n\n # Hide unverified users' things, except from curators and admins\n unless current_user && (current_user.is_curator? || current_user.is_admin?)\n without(:unverified, true)\n end\n end\n end\n end\n \n end\n\n @results.reject! { |_, result| result.total < 1 }\n end",
"def index\n if signed_in?\n @documents = Document.page(params[:page])\n else\n @documents = Document.page(params[:page]).where(:is_private => false)\n end\n end",
"def index\n @root_pages = Page.root_pages\n @uncategorized_pages = Page.uncategorized\n end",
"def generate_directory_index(dir)\n @log.debug(\" creating an index for #{dir}\")\n link_root = dir.gsub(@site_path, @config[:site_path]) \n \n links = {}\n\n # Construct a list of links.\n Dir.foreach(dir) do |entry|\n unless ['.', '..'].include?(entry)\n if File.directory?(File.join(dir, entry))\n #@log.debug(\" adding entry for #{entry}\")\n links[entry] = File.join(link_root, entry)\n end\n \n if entry =~ /(\\d{2})(\\d{2})\\.post/\n #@log.debug(\" adding entry for #{entry}\")\n links[\"#{$1}:#{$2}\"] = File.join(link_root, entry.gsub('post', 'html'))\n end\n end\n end\n\n page = {}\n page[:body] = ''\n\n # If we're at the top of the 'posts' directory, assign the page the title\n # of @config[:archive_title].\n if File.join(@site_path, @config[:post_path]) == dir\n page[:title] = @config[:archive_title]\n else\n page[:title] = File.basename(dir)\n end\n\n links.keys.sort.reverse.each do |k|\n page[:body] << \"<h3><a href='#{links[k]}'>#{k}</a></h3>\"\n end\n\n File.open(File.join(dir, 'index.html'), 'w') do |f|\n f << generate_page(page)\n end\n\n #@log.debug(\" generated an index for #{dir}\")\n end",
"def index\n @page_chunks = PageChunk.all\n end",
"def index; @index; end",
"def process(pages)\n robot = Robots.new USER_AGENT\n until pages.nil? or pages.empty?\n newfound_pages = []\n pages.each { |page|\n begin\n if add_to_index?(page) then\n uri = URI.parse(page)\n host = "#{uri.scheme}://#{uri.host}"\n open(page, "User-Agent" => USER_AGENT) { |s|\n (Hpricot(s)/"a").each { |a|\n url = scrub(a.attributes['href'], host)\n newfound_pages << url unless url.nil? or !robot.allowed? url or newfound_pages.include? url\n }\n }\n end\n rescue => e\n print "\\n** Error encountered crawling - #{page} - #{e.to_s}"\n rescue Timeout::Error => e\n print "\\n** Timeout encountered - #{page} - #{e.to_s}"\n end\n }\n pages = newfound_pages\n File.open(LAST_CRAWLED_PAGES, 'w') { |out| YAML.dump(newfound_pages, out) }\n end",
"def index\n @page = Page.find(params[:page_id])\n @urls = Url.where(page_id: params[:page_id])\n end",
"def index\n @title = t 'view.contents.index_title'\n @searchable = true\n @contents = @contents.filtered_list(params[:q]).page(params[:page])\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @contents }\n end\n end",
"def prune_index\n missing_files=index_time_dbm_file.reject{|filename,itime| File.exists?(filename) && Picolena::IndexedDirectories.any?{|dir,alias_path| filename.starts_with?(dir)}}\n missing_files.each{|filename, itime|\n index.writer.delete(:complete_path, filename)\n index_time_dbm_file.delete(filename)\n logger.debug \"Removed : #{filename}\"\n }\n index.optimize\n end",
"def write_archive_indexes\n if self.layouts.key? 'archive_index'\n dir = self.config['archive_dir'] || 'archives'\n posts_by_year_month = self.posts.inject({}) do |h, post|\n ((h[post.year] ||= {})[post.month] ||= []) << post\n h\n end.each do |year, months|\n months.each do |month, posts|\n self.write_archive_index(File.join(dir, year, month), posts.reverse, year, month)\n end\n end\n\n # Throw an exception if the layout couldn't be found.\n else\n throw \"No 'archive_index' layout found.\"\n end\n end",
"def index\n @documents_ids = [*1..30]\n end",
"def index\n \n @docfiles = Docfile.all\n end",
"def indexAllItems\n begin\n Thread.current[:name] = \"index thread\" # label all stdout from this thread\n batch = emptyBatch({})\n\n # The resolver and catalog stuff below is to prevent BioMed files from loading external DTDs\n # (which is not only slow but also unreliable)\n classPath = \"/apps/eschol/erep/xtf/WEB-INF/lib/saxonb-8.9.jar:\" +\n \"/apps/eschol/erep/xtf/control/xsl/jing.jar:\" +\n \"/apps/eschol/erep/xtf/normalization/resolver.jar\"\n Nailgun.run(classPath, 0, \"-Dxml.catalog.files=/apps/eschol/erep/xtf/normalization/catalog.xml\") { |nailgun|\n loop do\n # Grab an item from the input queue\n Thread.current[:name] = \"index thread\" # label all stdout from this thread\n itemID = $indexQueue.pop\n itemID or break\n\n # Extract data and index it (in batches)\n begin\n Thread.current[:name] = \"index thread: #{itemID}\" # label all stdout from this thread\n indexItem(itemID, batch, nailgun)\n rescue Exception => e\n puts \"Error indexing item #{itemID}\"\n raise\n end\n\n # To avoid Saxon's Java process from growing gigantic, restart it once in a while.\n nailgun.callCount == 1000 and nailgun.restart\n end\n }\n\n # Finish off the last batch.\n batch[:items].empty? or $batchQueue << batch\n rescue Exception => e\n if $preindexMode\n raise e\n else\n puts \"Exception in indexAllItems: #{e} #{e.backtrace}\"\n end\n ensure\n $batchQueue << nil # marker for end-of-queue\n end\nend",
"def index\n @notes = collection_based_on_path(request.path_parameters)\n @notes = custom_index_sort if params[:sort]\n render(:index)\n end",
"def index\n @documents = Document.where(project_id: session[:current_project_id]).order(:identifier)\n @project = current_project\n end",
"def index\n @documents = Document.all.delete_if { |document| cannot? :read, document }\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @documents }\n format.xml { render xml: @documents }\n end\n end",
"def index\n ## BUILD MASTER HASH WITH ALL SUB-PARAMS ##\n allparams = {}\n allparams[:mainmodel] = NodeRackNodeAssignment\n allparams[:webparams] = params\n results = Search.new(allparams).search\n\n flash[:error] = results[:errors].join('<br />') unless results[:errors].empty?\n includes = results[:includes]\n results[:requested_includes].each_pair{|k,v| includes[k] = v}\n @objects = results[:search_results]\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @objects.to_xml(:dasherize => false) }\n end\n end",
"def index_data(*models)\n if models.nil? || models.empty?\n only_index = @indexed_models\n else\n only_index = models.collect{|m| m.to_s}\n end \n \n Mebla.log(\"Indexing #{only_index.join(\", \")}\", :debug)\n \n # Build up a bulk query to save processing and time\n bulk_query = \"\"\n # Keep track of indexed documents\n indexed_count = {}\n \n # Create the index\n if create_index\n # Start collecting documents\n only_index.each do |model|\n Mebla.log(\"Indexing: #{model}\")\n # Get the class\n to_index = model.camelize.constantize\n \n # Get the records \n entries = []\n unless to_index.embedded?\n if to_index.sub_class?\n entries = to_index.any_in(:_type => [to_index.name])\n else \n entries = to_index.any_in(:_type => [nil, to_index.name])\n end\n else\n parent = to_index.embedded_parent\n access_method = to_index.embedded_as\n \n parent.all.each do |parent_record|\n if to_index.sub_class?\n entries += parent_record.send(access_method.to_sym).any_in(:_type => [to_index.name])\n else\n entries += parent_record.send(access_method.to_sym).any_in(:_type => [nil, to_index.name])\n end\n end\n end\n \n # Save the number of entries to be indexed\n indexed_count[model] = entries.count \n \n # Build the queries for this model \n entries.each do |document|\n attrs = {} #document.attributes.dup # make sure we dont modify the document it self\n attrs[:id] = document.attributes[\"_id\"] # the id is already added in the meta data of the action part of the query\n \n # only index search fields and methods\n document.class.search_fields.each do |field|\n if document.attributes.keys.include?(field.to_s)\n attrs[field] = document.attributes[field.to_s] # attribute\n else\n attrs[field] = document.send(field) # method\n end\n end\n \n # index relational fields\n document.class.search_relations.each do |relation, fields| \n items = document.send(relation.to_sym) # get the relation document\n \n next if items.nil?\n \n # N relation side\n if items.is_a?(Array) || items.is_a?(Mongoid::Relations::Targets::Enumerable)\n next if items.empty?\n attrs[relation] = []\n items.each do |item|\n if fields.is_a?(Array) # given multiple fields to index\n fields_values = {}\n fields.each do |field|\n if item.attributes.keys.include?(field.to_s)\n fields_values.merge!({ field => item.attributes[field.to_s] }) # attribute\n else\n fields_values.merge!({ field => item.send(field) }) # method\n end\n end\n attrs[relation] << fields_values\n else # only index one field in the relation\n if item.attributes.keys.include?(fields.to_s)\n attrs[relation] << { fields => item.attributes[fields.to_s] } # attribute\n else\n attrs[relation] << { fields => item.send(fields) } # method\n end\n end\n end\n # 1 relation side\n else\n attrs[relation] = {}\n if fields.is_a?(Array) # given multiple fields to index\n fields_values = {}\n fields.each do |field|\n if items.attributes.keys.include?(field.to_s)\n fields_values.merge!({ field => items.attributes[field.to_s] }) # attribute\n else\n fields_values.merge!({ field => items.send(field) }) # method\n end\n end\n attrs[relation].merge!(fields_values)\n else # only index one field in the relation\n if items.attributes.keys.include?(fields.to_s)\n attrs[relation].merge!({ fields => items.attributes[fields.to_s] }) # attribute\n else\n attrs[relation].merge!({ fields => items.send(fields) }) # method\n end\n end\n end\n end \n \n # If embedded get the parent id\n if document.embedded?\n parent_id = document.send(document.class.embedded_parent_foreign_key.to_sym).id.to_s \n 
attrs[(document.class.embedded_parent_foreign_key + \"_id\").to_sym] = parent_id\n attrs[:_parent] = parent_id\n \n # Build add to the bulk query\n bulk_query << build_bulk_query(@slingshot_index_name, to_index.slingshot_type_name, document.id.to_s, attrs, parent_id)\n else\n # Build add to the bulk query\n bulk_query << build_bulk_query(@slingshot_index_name, to_index.slingshot_type_name, document.id.to_s, attrs)\n end\n end\n end\n else\n raise Mebla::Errors::MeblaIndexException.new(\"Could not create #{@slingshot_index_name}!!!\")\n end \n \n Mebla.log(\"Bulk indexing:\\n#{bulk_query}\", :debug) \n \n # Send the query\n response = Slingshot::Configuration.client.post \"#{Mebla::Configuration.instance.url}/_bulk\", bulk_query\n \n # Only refresh the index if no error ocurred\n unless response =~ /error/ \n # Log results\n Mebla.log(\"Indexed #{only_index.count} model(s) to #{self.slingshot_index_name}: #{response}\")\n Mebla.log(\"Indexing Report:\")\n indexed_count.each do |model_name, count|\n Mebla.log(\"Indexed #{model_name}: #{count} document(s)\")\n end\n \n # Refresh the index\n refresh_index\n else\n raise Mebla::Errors::MeblaIndexException.new(\"Indexing #{only_index.join(\", \")} failed with the following response:\\n #{response}\")\n end\n rescue RestClient::Exception => error\n raise Mebla::Errors::MeblaIndexException.new(\"Indexing #{only_index.join(\", \")} failed with the following error: #{error.message}\")\n end",
"def run(content, params={})\n # only process item that are changed since last regeneration\n if (!@last_indexed.nil? && @last_indexed > item.mtime)\n return content\n end\n\n puts \"Indexing page #{@item.identifier}\"\n\n while not @index.running?\n # wait for the indextank index to get ready\n sleep 0.5\n end\n\n page_text = extract_text(content)\n\n @index.document(@item.identifier).add({\n :text => page_text,\n :title => @item[:title] || item.identifier\n })\n puts 'Indexed ' << item.identifier\n\n @last_indexed = Time.now\n write_last_indexed\n\n content\n end",
"def index\n @doc_templates = DocTemplate.all\n end",
"def index\n #@site_pages = SitePage.all\n end",
"def generate(site)\n\n @site = site\n index_dest = @site.instance_variable_get(:@dest)\n rebuild = @module_config['rebuild']\n index_file = index_dest + @module_config['index_file']\n\n if plugin_disabled?\n Jekyll.logger.info 'J1 Lunr:', 'disabled'\n return\n else\n Jekyll.logger.info 'J1 Lunr:', 'enabled'\n Jekyll.logger.info 'J1 Lunr:', 'generate search index'\n end\n\n if @module_config['rebuild'] == false\n if File.exist?(index_file)\n Jekyll.logger.info 'J1 Lunr:', 'rebuild index disabled'\n # Keep the index file from being cleaned by Jekyll\n #\n site.static_files << SearchIndexFile.new(site, site.dest, '/', @module_config['index_file'])\n return\n end\n end\n\n # gather posts and pages\n #\n items = pages_to_index(site)\n content_renderer = PageRenderer.new(site)\n # index = []\n # rebuild = @module_config['rebuild']\n\n index_js = open(@lunr_path).read\n\n # NOTE: all settings must be added within the index function\n #\n index_js << 'var idx = lunr(function() {'\n\n @lunr_config['fields'].each_pair do |name, boost|\n index_js << \"this.field('#{name}', {'boost': #{boost}});\"\n end\n\n items.each_with_index do |item, i|\n entry = SearchEntry.create(item, content_renderer)\n\n entry.strip_index_suffix_from_url! if @strip_index_html\n entry.strip_stopwords!(stopwords, @min_length) if File.exists?(@stopwords_file)\n\n doc = {\n 'id' => i,\n 'title' => entry.title,\n 'tagline' => entry.tagline,\n 'url' => entry.url,\n 'date' => entry.date,\n 'tags' => entry.tags,\n 'categories' => entry.categories,\n 'description' => entry.description,\n 'is_post' => entry.is_post,\n 'body' => entry.body\n }\n\n # remove unwanted categories (if any)\n #\n doc['categories'] -= @strip_categories unless doc['categories'] == nil\n\n index_js << 'this.add(' << ::JSON.generate(doc, quirks_mode: true) << ');'\n\n # reduce the size of the doc array by deleting the body key\n #\n doc.delete('body')\n @docs[i] = doc\n\n end\n index_js << '});'\n\n filename = File.join(@index_dir, \"#{@index_name}\")\n ctx = ExecJS.compile(index_js)\n index = ctx.eval('JSON.stringify(idx)')\n\n total = {\n 'docs' => @docs,\n 'index' => ::JSON.parse(index, {:max_nesting => false})\n }\n filepath = File.join(site.dest, filename)\n\n # create data path if not already exists\n #\n FileUtils.mkdir_p(File.dirname(filepath))\n File.open(filepath, 'w') { |f| f.write(JSON.dump(total)) }\n# Jekyll.logger.info 'J1 Lunr:', \"finished, index ready.\"\n added_files = [filename]\n\n # Keep the written files from being cleaned by Jekyll\n #\n added_files.each do |fname|\n site.static_files << SearchIndexFile.new(site, site.dest, '/', fname)\n end\n end",
"def generate_index\n setup\n\n template_file = @template_dir + 'index.rhtml'\n return unless template_file.exist?\n\n debug_msg \"Rendering the index page...\"\n\n out_file = @base_dir + @options.op_dir + 'index.html'\n rel_prefix = @outputdir.relative_path_from out_file.dirname\n search_index_rel_prefix = rel_prefix\n search_index_rel_prefix += @asset_rel_path if @file_output\n\n asset_rel_prefix = rel_prefix + @asset_rel_path\n\n @title = @options.title\n\n render_template template_file, out_file do |io|\n here = binding\n # suppress 1.9.3 warning\n here.local_variable_set(:asset_rel_prefix, asset_rel_prefix)\n here\n end\n rescue => e\n error = RDoc::Error.new \\\n \"error generating index.html: #{e.message} (#{e.class})\"\n error.set_backtrace e.backtrace\n\n raise error\n end",
"def index\n @page_elements = @page.page_elements\n if (@page)\n @mainelements = @page.page_elements.where(visible: true,sidebar: false)\n @sidebarelements = @page.page_elements.where(visible:true,sidebar: true)\n end\n if(@mainelemnents) && (@mainelements.count > 1)\n @mainelements = @mainelements.sort_by{ |x| x[:displayIndex]}\n end\n @poster = @council.posts\n @rest = @page.page_elements.where(visible: false)\n end",
"def default_index_subaction\n list_all\n end",
"def do_local_indexing(solr_doc); end",
"def index\n Index.new(root, paths, extensions, aliases)\n end",
"def generate_all_classes_index(site)\n puts \"55555555555555555555555555555555555555555555555555555555\"\n allClasses = AllClassesIndex.new(site, site.source, \"/pages/classes\")\n allClasses.render(site.layouts, site.site_payload)\n allClasses.write(site.dest)\n\n site.pages << allClasses\n site.static_files << allClasses\n end",
"def index\n @content_mappings = ContentMapping.find(:all, :order => 'page_id ASC, section_id ASC')\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @content_mappings }\n end\n end",
"def index\n if @full_index\n @controller.index\n else\n @controller.index @indexes\n end\n end",
"def process(pages)\n robot = Robots.new USER_AGENT\n until pages.nil? or pages.empty? \n newfound_pages = []\n pages.each { |page|\n begin\n if add_to_index?(page) then \n uri = URI.parse(page)\n host = \"#{uri.scheme}://#{uri.host}\"\n open(page, \"User-Agent\" => USER_AGENT) { |s|\n (Hpricot(s)/\"a\").each { |a| \n url = scrub(a.attributes['href'], host)\n newfound_pages << url unless url.nil? or !robot.allowed? url or newfound_pages.include? url\n }\n } \n end\n rescue => e \n print \"\\n** Error encountered crawling - #{page} - #{e.to_s}\"\n rescue Timeout::Error => e\n print \"\\n** Timeout encountered - #{page} - #{e.to_s}\"\n end\n }\n pages = newfound_pages\n File.open(LAST_CRAWLED_PAGES, 'w') { |out| YAML.dump(newfound_pages, out) }\n end \n end",
"def index\n @item_not_includeds = ItemNotIncluded.all\n end",
"def index\n\t\tprepare_variable_for_index_template\n\tend",
"def index\n @sites = Site.where(:reference => false, :page_rank.gt => -1).sort(:name).all\n @reference_sites = Site.where(:reference => true).sort(:name).all\n respond_with(@sites)\n end"
] | [
"0.67897886",
"0.6443216",
"0.63677955",
"0.6316591",
"0.6270402",
"0.60933906",
"0.6065452",
"0.6006128",
"0.59498966",
"0.5900518",
"0.58711773",
"0.5818041",
"0.58163923",
"0.5811085",
"0.5801141",
"0.57747304",
"0.5763593",
"0.57465756",
"0.5745121",
"0.5709401",
"0.569746",
"0.5687432",
"0.56834954",
"0.5673243",
"0.56666136",
"0.56629086",
"0.5657137",
"0.5649897",
"0.5641522",
"0.5629696",
"0.5608042",
"0.55884975",
"0.5580087",
"0.55780756",
"0.55693513",
"0.55482936",
"0.55333304",
"0.55315995",
"0.55286753",
"0.5523829",
"0.5514432",
"0.5497702",
"0.5495633",
"0.54916507",
"0.54753494",
"0.54711056",
"0.5465779",
"0.5441548",
"0.54323125",
"0.5423268",
"0.5418955",
"0.54135466",
"0.54101795",
"0.5409371",
"0.54074514",
"0.54033446",
"0.5399331",
"0.5394834",
"0.5386095",
"0.53836346",
"0.53821945",
"0.537985",
"0.53792846",
"0.53766817",
"0.5373613",
"0.5367233",
"0.5364798",
"0.5364403",
"0.53597754",
"0.5355076",
"0.5354852",
"0.5354059",
"0.53450996",
"0.5343106",
"0.5329161",
"0.5326112",
"0.5324336",
"0.5324307",
"0.5320216",
"0.5315695",
"0.53150237",
"0.5302965",
"0.53019786",
"0.52957356",
"0.529495",
"0.52932805",
"0.52929723",
"0.52915597",
"0.5290881",
"0.5290717",
"0.5280329",
"0.52734256",
"0.5271969",
"0.5271059",
"0.52705026",
"0.5268639",
"0.5266951",
"0.52668387",
"0.5266248",
"0.5265768"
] | 0.54789686 | 44 |
Changes fill color of cell | def change_fill(rgb = 'ffffff')
validate_worksheet
Color.validate_color(rgb)
self.style_index = workbook.modify_fill(self.style_index, rgb)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fill(color)\n @style[:fill] = color\n end",
"def fill_color(color)\n end",
"def change_fill(rgb='ffffff')\n validate_worksheet\n Color.validate_color(rgb)\n @style_index = modify_fill(@workbook, @style_index,rgb)\n end",
"def setfillcolorind(*)\n super\n end",
"def fillColor=(color)\n @fill_color = color.CGColor\n self.setNeedsDisplay\n end",
"def fill(color)\n @window.fill_with(color)\n end",
"def fill_color()\n validate_worksheet\n xf = @workbook.get_style_attributes(@workbook.get_style(@style_index))\n return @workbook.get_fill_color(xf)\n end",
"def fill(pattern)\n @style[:fill] = pattern(pattern)\n end",
"def fill_color(color=nil)\n cur_page.fill_color(color)\n end",
"def change_row_fill(row=0,rgb='ffffff')\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n Color.validate_color(rgb)\n if @row_styles[(Integer(row)+1).to_s].nil?\n @row_styles[(Integer(row)+1).to_s] = {}\n @row_styles[(Integer(row)+1).to_s][:style] = '0'\n end\n\n @row_styles[(Integer(row)+1).to_s][:style] = modify_fill(@workbook,Integer(@row_styles[(Integer(row)+1).to_s][:style]),rgb)\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n c.change_fill(rgb)\n end\n end\n end",
"def set_fill_color_a(color = RFPDF::COLOR_PALETTE[:white], colorspace = :rgb)\n if colorspace == :cmyk\n SetCmykFillColor(color[0], color[1], color[2], color[3])\n else\n SetFillColor(color[0], color[1], color[2])\n end\n end",
"def fill=(value)\n @fill = value\n end",
"def shading_colour=(colour)\n self.each {|cell| cell.shading_colour = colour}\n end",
"def fill_cell(loc = {}, fill_color = BLUE, opacity = 255)\n fill_color.alpha = opacity\n tl, tr, bl, br = pixel_top_left(loc[:x], loc[:y]), pixel_top_right(loc[:x], loc[:y]),\n pixel_bottom_left(loc[:x], loc[:y]), pixel_bottom_right(loc[:x], loc[:y])\n make_quad(tl, tr.left(1), bl.up(1), br.left(1).up(1), fill_color, opacity, loc[:z])\n end",
"def color(val)\n raise 'Please provide a valid cell color' unless ('A'..'Z').include?(val)\n self.value = val\n end",
"def color_reset!(fill)\n save = background_color\n # Change the background color _outside_ the begin block\n # so that if this object is frozen the exeception will be\n # raised before we have to handle it explicitly.\n self.background_color = fill\n begin\n erase!\n ensure\n self.background_color = save\n end\n self\n end",
"def fill(colorspec)\n primitive \"fill #{enquote(colorspec)}\"\n end",
"def setfillstyle(*)\n super\n end",
"def modify_fill(workbook, style_index, rgb)\n xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(xf_obj)\n #modify fill array\n fill_id = xf[:fillId]\n\n fill = workbook.fills[fill_id.to_s][:fill]\n if workbook.fills[fill_id.to_s][:count] > 1 || fill_id == 0 || fill_id == 1\n old_size = workbook.fills.size.to_s\n workbook.fills[old_size] = {}\n workbook.fills[old_size][:fill] = deep_copy(fill)\n workbook.fills[old_size][:count] = 1\n workbook.fills[fill_id.to_s][:count] -= 1\n\n change_wb_fill(workbook, old_size,rgb)\n\n #modify styles array\n fill_id = old_size\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fillId] = fill_id\n xf[:applyFill] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1\n else\n change_wb_fill(workbook, fill_id.to_s,rgb)\n return style_index\n end\n end",
"def set_svg_fill_color(xml, color)\n # Find all primitives.\n primitives = ['path', 'circle', 'rect', 'line', 'ellipse', 'polyline', 'polygon', 'text']\n \n # Strip existing fills.\n primitives.each do |primitive|\n xml.xpath(\"//xmlns:#{primitive}\").each do |child|\n child['fill'] = \"##{color}\"\n end\n end\n end",
"def with_color(color=\"999999\", &block)\n fill_color color\n yield if block_given?\n # Reset the color to the default\n fill_color grey\n end",
"def inqfillcolorind\n inquiry_int { |pt| super(pt) }\n end",
"def inqfillcolorind\n inquiry_int { |pt| super(pt) }\n end",
"def set_cell(x, y, color)\n get_cell(x, y).color = color\n @last_cell_played = get_cell(x, y)\n end",
"def color=(color)\n each_with_index do |cell, index|\n cell.color = color.is_a?(Array) ? color[index] : color\n end\n end",
"def fill_setup(gc)\n if @obj.fill\n gc.set_background @obj.fill\n gc.set_alpha @obj.fill_alpha\n true\n end\n end",
"def fill_region(x, y, colour)\n return if invalid?(x,y)\n x = xform(x)\n y = xform(y)\n target_colour = @matrix.element(y, x)\n @matrix.flood_fill(x, y, target_colour, colour)\n end",
"def color_fill_to_border(x, y, fill)\n color_flood_fill(border_color, fill, x, y, Magick::FillToBorderMethod)\n end",
"def color= color\n @marker_fill_color = color\n @marker_border_color = color\n end",
"def fill(r=0, g=0, b=0, a=1)\n case r\n when Color\n g = r.g\n b = r.b\n a = r.a\n r = r.r\n end\n CGContextSetRGBFillColor(@ctx, r, g, b, a) # RGBA\n @fill = true\n end",
"def update_fill\n end",
"def fill(row, column, colour, memo = [])\n start_colour = self[row, column]\n self[row, column] = colour\n\n memo.push([row, column]) # adds coordinates to memo\n p memo\n (row - 1..row + 1).each do |i|\n (column - 1..column + 1).each do |j|\n next unless valid_coordinates?(i, j)\n next if memo.include? [i, j]\n next unless self[i, j] == start_colour\n\n fill(i, j, colour, memo)\n end\n end\n end",
"def colorize canvas, color\n canvas.itemconfigure tag, :fill => color\n rescue => ex\n if ex.message =~ /unknown option \"-fill\"/\n extend CarefulColorize\n colorize canvas, color\n else\n raise\n end\n end",
"def setfillintstyle(*)\n super\n end",
"def highlight_cell(tbl, row, col, id, check: false)\n bg_color = '&#ade6dd' unless check\n\n tbl[row + 1][col + 1] = { content: id,\n check: check,\n class: 'td-full-slot',\n style: {'background-color'=> bg_color}}\n end",
"def fill_paint(paint)\n end",
"def fill_column(x, y1, y2, colour)\n return if invalid?(x,y1) || invalid?(x,y2)\n y1.upto(y2) do |i|\n colour(x,i,colour)\n end\n end",
"def set_color(piece)\n # sets color to red if at cursor [row, col]\n if cursor.cursor_pos == piece.position\n to_color = {:color => :light_blue, :background => :red}\n # otherwise color is blue\n else\n to_color = :blue\n end\n\n to_color\n end",
"def fill\n fit\n rounding_error\n border_x = (@x - rmagick_img.columns)/2\n border_y = (@y - rmagick_img.rows)/2\n\n rmagick_img.border!(border_x,border_y,\"white\")\n end",
"def fill_and_stroke\n cur_page.fill_and_stroke\n end",
"def paint( color )\n self.update(color: color)\n end",
"def fill\n \t@fill\n end",
"def setcolorrep(*)\n super\n end",
"def flood_fill(x, y, color)\n x_i, y_i = pixel_to_index(x, y)\n original_color = bitmap[y_i][x_i]\n flood_fill_helper(x, y, original_color, color)\n end",
"def bg_color_for cell:, algo: :rotate_hue, scalar: 1.25\n\t\t\tself.send(algo, cell: cell, scalar: scalar)\n\t\tend",
"def color_floodfill(x, y, fill)\n target = pixel_color(x, y)\n color_flood_fill(target, fill, x, y, Magick::FloodfillMethod)\n end",
"def SetFillColor(r, g=-1, b=-1, storeprev=false)\n\t\t#Set color for all filling operations\n\t\tif ((r==0 and g==0 and b==0) or g==-1)\n\t\t\t@fill_color=sprintf('%.3f g', r/255.0);\n\t\telse\n\t\t\t@fill_color=sprintf('%.3f %.3f %.3f rg', r/255.0, g/255.0, b/255.0);\n\t\tend\n\t\t@color_flag=(@fill_color!=@text_color);\n\t\tif (@page>0)\n\t\t\tout(@fill_color);\n\t\tend\n\t\tif (storeprev)\n\t\t\t# store color as previous value\n\t\t\t@prevfill_color = [r, g, b]\n\t\tend\n\tend",
"def color\n @marker_fill_color || @marker_border_color || :default\n end",
"def paint_fill(screen, point, new_color, old_color = nil)\n x = point[0]\n y = point[1]\n\n old_color ||= screen[y][x]\n if screen[y][x] != old_color\n return\n else\n screen[y][x] = new_color\n paint_fill(screen, [x, y + 1], new_color, old_color) if y + 1 < screen.length\n paint_fill(screen, [x, y - 1], new_color, old_color) if y - 1 >= 0\n paint_fill(screen, [x + 1, y], new_color, old_color) if x + 1 < screen[0].length\n paint_fill(screen, [x - 1, y], new_color, old_color) if x - 1 >= 0\n end\n screen\nend",
"def background_fill\n @background_fill ||= begin\n digest = \"0.#{Digest::MD5.hexdigest(@name).to_i(16).to_s}\".to_f\n index = (digest * (@background_colors.length - 1)).round\n @background_colors[index]\n end\n end",
"def clear(fill_color = @fill)\n color = fill_color || @fill\n ext_clear([color.r, color.g, color.b, color.a])\n update_texture if @update\n end",
"def fill\n self.each {|image| image.fill}\n end",
"def fillrect(*)\n super\n end",
"def paint_fill(screen, point, new_color)\n seen = {}\n old_color = color(point, screen)\n to_fill = [point]\n\n until to_fill.empty? do\n current_point = to_fill.shift\n screen[current_point[0]][current_point[1]] = new_color\n neighbors(current_point, screen.length, screen.first.length).each do |neighbor|\n if !seen[neighbor] && color(neighbor, screen) == old_color\n to_fill.push(neighbor)\n seen[neighbor] = true\n end\n end\n end\n\n render screen\nend",
"def colour(x, y, colour)\n return if invalid?(x, y)\n x = xform(x)\n y = xform(y)\n @matrix[y, x] = colour\n end",
"def cellarray(xmin, xmax, ymin, ymax, dimx, dimy, color)\n super(xmin, xmax, ymin, ymax, dimx, dimy, 1, 1, dimx, dimy, int(color))\n end",
"def cellarray(xmin, xmax, ymin, ymax, dimx, dimy, color)\n super(xmin, xmax, ymin, ymax, dimx, dimy, 1, 1, dimx, dimy, int(color))\n end",
"def change_bg_color(color)\n @bg_color = color\n self.bitmap.fill_rect(self.bitmap.rect,color)\n end",
"def set_draw_color_a(color = RFPDF::COLOR_PALETTE[:black])\n SetDrawColor(color[0], color[1], color[2])\n end",
"def circle_filled(x, y, r)\n\t\t@screen.drawAAFilledCircle x, y, r, @color_fg\n\tend",
"def gradient_fill_region(x, y, a, b, c)\n return if invalid?(x, y)\n fill_region(x, y, c)\n colour(x, y, a)\n x = x.to_i\n y = y.to_i\n conditional_colour(x-1, y, b, c)\n conditional_colour(x+1, y, b, c)\n conditional_colour(x, y-1, b, c)\n conditional_colour(x, y+1, b, c)\n conditional_colour(x-1, y-1, b, c)\n conditional_colour(x+1, y-1, b, c)\n conditional_colour(x-1, y+1, b, c)\n conditional_colour(x+1, y+1, b, c) \n end",
"def prepare_fills # :nodoc:\n fills = {}\n index = 2 # Start from 2. See above.\n\n # Add the default fills.\n fills['0:0:0'] = 0\n fills['17:0:0'] = 1\n\n # Store the DXF colors separately since them may be reversed below.\n @dxf_formats.each do |format|\n next unless format.pattern != 0 || format.bg_color != 0 || format.fg_color != 0\n\n format.has_dxf_fill(true)\n format.dxf_bg_color = format.bg_color\n format.dxf_fg_color = format.fg_color\n end\n\n @xf_formats.each do |format|\n # The following logical statements jointly take care of special cases\n # in relation to cell colours and patterns:\n # 1. For a solid fill (_pattern == 1) Excel reverses the role of\n # foreground and background colours, and\n # 2. If the user specifies a foreground or background colour without\n # a pattern they probably wanted a solid fill, so we fill in the\n # defaults.\n #\n if format.pattern == 1 && ne_0?(format.bg_color) && ne_0?(format.fg_color)\n format.fg_color, format.bg_color = format.bg_color, format.fg_color\n elsif format.pattern <= 1 && ne_0?(format.bg_color) && eq_0?(format.fg_color)\n format.fg_color = format.bg_color\n format.bg_color = 0\n format.pattern = 1\n elsif format.pattern <= 1 && eq_0?(format.bg_color) && ne_0?(format.fg_color)\n format.bg_color = 0\n format.pattern = 1\n end\n\n key = format.get_fill_key\n\n if fills[key]\n # Fill has already been used.\n format.fill_index = fills[key]\n format.has_fill(false)\n else\n # This is a new fill.\n fills[key] = index\n format.fill_index = index\n format.has_fill(true)\n index += 1\n end\n end\n\n @fill_count = index\n end",
"def set_cellval(x,y,value, opts = {:color => 0})\n cell = @ole_worksheet.Cells.Item(x, y)\n cell.Interior.ColorIndex = opts[:color] # 42 - aqua-marin, 4-green\n @workbook.modified_cells << cell if @workbook\n cell.Value = value\n rescue WIN32OLERuntimeError\n raise RangeNotEvaluatable, \"cannot assign value #{value.inspect} to cell (#{y.inspect},#{x.inspect})\"\n end",
"def fill_row(x1, x2, y, colour)\n return if invalid?(x1,y) || invalid?(x2,y)\n x1.upto(x2) do |i|\n colour(i,y,colour)\n end\n end",
"def F(args)\n return unless check_dimensions(args, 3)\n cell = Cell.new *check_integers(args[0..1])\n return unless cell\n new_colour = args[2]\n existing_colour = @image.get_colour(cell)\n return if existing_colour == new_colour\n check_cells = [cell]\n until check_cells.empty?\n new_cells = fill(check_cells.pop, existing_colour, new_colour)\n check_cells.push(*new_cells.select{ |cell| @image.in?(cell)})\n end\n end",
"def color=(theColor)\n @shadowHash[:fillcolor] = theColor\n end",
"def recolor(color)\n @color = color\n self\n end",
"def floodFill(col, row)\r\n (col > @columns-1 || col < 0 || row > @rows-1 || row < 0) && return #Returns if the tile index is outside of the grid bounds.\r\n @tile[col][row].revealed && return #Returns if the tile is already revealed.\r\n\r\n @tile[col][row].revealed = true #Marks the tile as revealed.\r\n @hiddenCount -= 1\r\n adjacent = @tile[col][row].adjacent #Gets the adjacent count for the tile.\r\n\r\n #Reveal the adjacent count of the tile.\r\n old = @tile[col][row].btn\r\n newStyle = old.style.dup\r\n old.parent.before(old) do\r\n @btn = button(adjacent.to_s, newStyle)\r\n end\r\n old.remove\r\n\r\n #Recursively calls flood fill for the surrounding tiles.\r\n if (@tile[col][row].adjacent == 0)\r\n floodFill(col+1,row+1)\r\n floodFill(col+1,row)\r\n floodFill(col+1,row-1)\r\n floodFill(col,row+1)\r\n floodFill(col,row-1)\r\n floodFill(col-1,row+1)\r\n floodFill(col-1,row)\r\n floodFill(col-1,row-1)\r\n end\r\n\r\nend",
"def highlightCell\n disptype = @colvalues[@col]\n highlight = @highlight\n infolen = @info[@row][@col].size\n \n # Given the dominance of the color/attributes, we need to set the\n # current cell attribute.\n if @dominant == CDK::ROW\n highlight = (@rowtitle[@crow][0] || 0) & Ncurses::A_ATTRIBUTES\n elsif @dominant == CDK::COL\n highlight = (@coltitle[@ccol][0] || 0) & Ncurses::A_ATTRIBUTES\n end\n\n # If the column is only one char.\n (1..@colwidths[@ccol]).each do |x|\n ch = if x <= infolen && !Display.isHiddenDisplayType(disptype)\n then CDK.CharOf(@info[@row][@col][x - 1])\n else @filler\n end\n self.CurMatrixCell.mvwaddch(1, x, ch.ord | highlight)\n end\n self.CurMatrixCell.wmove(1, infolen + 1)\n self.CurMatrixCell.wrefresh\n end",
"def foreground_fill\n red = background_fill[1..2].to_i(16)\n green = background_fill[3..4].to_i(16)\n blue = background_fill[5..6].to_i(16)\n (red * 0.299 + green * 0.587 + blue * 0.114) > 186 ? '#000000' : '#FFFFFF'\n end",
"def floodfill(x, y, color)\n if @pixels[y-1][x-1] != color\n @pixels[y-1][x-1] = color\n floodfill(x+1, y, color)\n floodfill(x-1, y, color)\n floodfill(x, y + 1, color)\n floodfill(x, y - 1, color)\n end\n end",
"def change_grid(x:, y:, color:)\n return if (x > max_x) || (x < 0)\n return if (y > max_y) || (y < 0)\n col = effective_color(x: x, y: y, color: color)\n grid_apply_color(x, y, col)\n end",
"def new_fill(value)\n self.class.new(*shape).fill(value)\n end",
"def set_fill\n @fill = Fill.find(params[:id])\n end",
"def colour(x, y, c)\n x, y = x.to_i, y.to_i\n\n unless self.bounds?(x, y)\n puts \"ERR: Coordinates out of bounds\"\n return\n end\n\n self[y, x] = c\n end",
"def SetCmykFillColor(c, m, y, k, storeprev=false)\n\t\t#Set color for all filling operations\n\t\t@fill_color=sprintf('%.3f %.3f %.3f %.3f k', c, m, y, k);\n\t\t@color_flag=(@fill_color!=@text_color);\n\t\tif (storeprev)\n\t\t\t# store color as previous value\n\t\t\t@prevtext_color = [c, m, y, k]\n\t\tend\n\t\tif (@page>0)\n\t\t\tout(@fill_color);\n\t\tend\n\tend",
"def matte_fill_to_border(x, y)\n f = copy\n f.alpha(OpaqueAlphaChannel) unless f.alpha?\n f.matte_flood_fill(border_color, x, y, FillToBorderMethod, alpha: TransparentAlpha)\n end",
"def spray_paint(new_color)\n self.color = new_color\n end",
"def process(doc)\n doc.css('[fill^=\"url(#\"]').attr(\"fill\", \"#ffffff\")\n doc.css('[style*=\"fill:url(#\"]').each do |el|\n style = el.attribute(\"style\").to_s\n updated_style = style.gsub(/fill:url\\([^\\)]*\\)/, \"fill:#ffffff\")\n el[\"style\"] = updated_style\n end\nend",
"def draw_fill_rect(rect, color, _fill_type = fill_type)\n bmp = self.bitmap\n rect = Convert.Rect(rect) unless rect.is_a?(Rect) # Rect#try_cast\n return if rect.empty?\n case _fill_type\n ## default style\n when :null\n when :flat, :default\n bmp.fill_rect(rect, color)\n when :round\n bmp.round_fill_rect(rect, color)\n when :blend\n bmp.blend_fill_rect(rect, color)\n when :blend_round\n bmp.round_blend_fill_rect(rect, color)\n when :smooth, :smooth_round\n ## smooth style\n color2 = color.blend.darken(0.08) # kinda heavy\n r1 = rect.dup\n r2 = r1.contract(anchor: border_anchor, amount: border_size)\n if _fill_type == :smooth\n ## block\n bmp.blend_fill_rect(r1, color2)\n bmp.blend_fill_rect(r2, color)\n elsif _fill_type == :smooth_round\n ## rounded\n bmp.round_blend_fill_rect(r1, color2)\n bmp.round_blend_fill_rect(r2, color)\n end\n else\n raise ArgumentError, \"invalid fill_type #{_fill_type}\"\n end\n return rect\n end",
"def fill_rect(*args)\n raise \"not implemented\"\n end",
"def strokeColor=(color)\n @stroke_color = color.CGColor\n self.setNeedsDisplay\n end",
"def fill\n return @fill\n end",
"def fill_graphicsblock(*args)\n @p.fill_graphicsblock(self, *args)\n end",
"def floodfill(pixel, target_color, replacement_color)\n return unless pixel\n return if pixel.color == replacement_color\n return if pixel.color != target_color\n return if target_color != BLANK_COLOR && @pixel_lock # don't replace non-blank pixels with color if pixels are locked\n\n pixel.color = replacement_color\n @canvas_changed = true\n\n # UP\n _pixel = get_pixel_at(pixel.x, pixel.y - @grid_pixel_size)\n floodfill(_pixel, target_color, replacement_color)\n\n # DOWN\n _pixel = get_pixel_at(pixel.x, pixel.y + @grid_pixel_size)\n floodfill(_pixel, target_color, replacement_color)\n\n # LEFT\n _pixel = get_pixel_at(pixel.x - @grid_pixel_size, pixel.y)\n floodfill(_pixel, target_color, replacement_color)\n\n # RIGHT\n _pixel = get_pixel_at(pixel.x + @grid_pixel_size, pixel.y)\n floodfill(_pixel, target_color, replacement_color)\n end",
"def highlight\n\t\tpushStyle\n\t\t\tno_stroke\n\t\t\tfill 14, 70, 70\n\t\t\tellipse(@node_x, @node_y, @@node_size + @@expand, @@node_size + @@expand)\n\t\tpopStyle\n\tend",
"def recolor(tag, color)\r\n @listarea.tag_configure(tag, 'foreground' => color)\r\n end",
"def set_neighbour_colour(col, row)\n xd = rand(2)\n @blocks[row][col] = @blocks[row - (1 - xd)][col - xd]\n end",
"def update!(**args)\n @border = args[:border] if args.key?(:border)\n @fill = args[:fill] if args.key?(:fill)\n @shape = args[:shape] if args.key?(:shape)\n end",
"def color_point(x, y, fill)\n f = copy\n f.pixel_color(x, y, fill)\n f\n end",
"def flood_fill(image, sr, sc, new_color)\r\n queue = [[sr, sc]]\r\n old_color = image[sr][sc]\r\n until queue.empty? do \r\n start = queue.pop\r\n x, y = start\r\n image[x][y] = new_color\r\n queue << [x, y+1] if y+1 < image[x].length && image[x][y+1] == old_color\r\n queue << [x, y-1] if y-1 >= 0 && image[x][y-1] == old_color\r\n queue << [x+1, y] if x+1 < image.length && image[x+1][y] == old_color\r\n queue << [x-1, y] if x-1 >= 0 && image[x-1][y] == old_color\r\n end\r\n image\r\nend",
"def color=(value)\n @color = value\n end",
"def color=(value)\n @color = value\n end",
"def setmarkercolorind(*)\n super\n end",
"def color(color); end",
"def clear\n grid.map do |row|\n row.map! do |pixel|\n pixel = DEFAULT_FILL\n end\n end\n end",
"def []=(x, y, color)\n @modified[x, y] = color\n end",
"def setDefaultColor\n self.changeColor A_NORMAL, COLOR_WHITE, COLOR_BLACK\n end",
"def bucket_paint(x_index = nil, y_index = nil)\n if x_index.nil? || y_index.nil?\n x_index = @x_index\n y_index = @y_index\n end\n\n # Checks if the adjacent cell's color is same the current cell.\n # On success, it paints the current cell and moves to the next adjacent cells.\n return if @input_2d_array[x_index].nil? || @input_2d_array[x_index][y_index] != @old_color\n\n @input_2d_array[x_index][y_index] = @new_color\n\n # Paint right\n bucket_paint(x_index+1, y_index)\n # Paint bottom\n bucket_paint(x_index, y_index+1)\n # Paint left\n bucket_paint(x_index-1, y_index) if x_index-1 >= 0\n # Paint top\n bucket_paint(x_index, y_index-1) if y_index-1 >= 0\n end"
] | [
"0.7817586",
"0.7680768",
"0.7537821",
"0.74548644",
"0.7234923",
"0.7226518",
"0.70177794",
"0.69221914",
"0.68277735",
"0.67624754",
"0.66980034",
"0.6589905",
"0.6563709",
"0.6500531",
"0.64753926",
"0.6473927",
"0.63611907",
"0.63611335",
"0.6320901",
"0.62614584",
"0.62032354",
"0.61087066",
"0.61087066",
"0.6101561",
"0.60948205",
"0.60713506",
"0.60606945",
"0.60553396",
"0.60447353",
"0.60416305",
"0.601891",
"0.5999653",
"0.5975085",
"0.5962525",
"0.5924309",
"0.59104276",
"0.58524483",
"0.58392084",
"0.5806253",
"0.580579",
"0.58012",
"0.5783394",
"0.5773162",
"0.5757474",
"0.5752411",
"0.5745072",
"0.57403934",
"0.5729638",
"0.57263863",
"0.5686482",
"0.56697345",
"0.56683046",
"0.5637301",
"0.5632878",
"0.55885696",
"0.5578833",
"0.5578833",
"0.5570731",
"0.5537497",
"0.5494836",
"0.5494146",
"0.54640824",
"0.5453173",
"0.544281",
"0.5419062",
"0.54175633",
"0.5414549",
"0.5392301",
"0.538533",
"0.5385325",
"0.5384122",
"0.53760815",
"0.5371713",
"0.536633",
"0.5361674",
"0.53517854",
"0.531716",
"0.53039205",
"0.53034234",
"0.53018665",
"0.5300732",
"0.5274945",
"0.5272186",
"0.526769",
"0.5267306",
"0.5266123",
"0.52617043",
"0.525021",
"0.52326703",
"0.5207748",
"0.51998264",
"0.5181946",
"0.51803184",
"0.5173454",
"0.51678836",
"0.5162485",
"0.51523364",
"0.5150673",
"0.51499456"
] | 0.75035655 | 4 |
Changes font name of cell | def change_font_name(new_font_name = 'Verdana')
validate_worksheet
font = get_cell_font.dup
font.set_name(new_font_name)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_font_name(font_name='Verdana')\n validate_worksheet\n # Get copy of font object with modified name\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_font(font)\n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def font_name()\n validate_worksheet\n @workbook.fonts[font_id()][:font][:name][:attributes][:val]\n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def change_font(font)\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, font_id())\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id())\n xf[:fontId] = Integer(font_id.to_i)\n # Modify xf array and retrieve new xf id\n @style_index = modify_xf(@workbook, xf)\n end",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def default_font_name()\n end",
"def fontName\n return @font_name\n end",
"def update_font\n self.contents.font.name = @window.fontName\n #self.recalculate_maxlength\n self.refresh\n end",
"def font(row, col, sheet = nil)\n sheet ||= default_sheet\n read_cells(sheet)\n row, col = normalize(row, col)\n style_name = @style[sheet][[row, col]] || @style_defaults[sheet][col - 1] || 'Default'\n @font_style_definitions[style_name]\n end",
"def font=(font)\n @label.font = font\n end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def font(name)\n primitive \"font \\'#{name}\\'\"\n end",
"def font=(value)\n @font = value\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def name\n read_attribute(:font_name)\n end",
"def setfont(*args)\n @p.setfont(self, *args)\n end",
"def font(row, col, sheet = nil)\n sheet ||= @default_sheet\n read_cells(sheet)\n\n @fonts[sheet][normalize(row,col).to_a]\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def set_font(face, size)\n @curr_font = Gauges::FontRef.get(face, size)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def coordinate_labels_font_family\n if self.options[:font_family].blank?\n 'Helvetica Neue'\n else\n self.options[:font_family]\n end\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def font(name=nil, size=nil, options={})\n cur_page.font(name, size, options)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def default_font; \"example.font\"; end",
"def change_font_decor\n #If toggled to on, font is bold, otherwise not bold.\n # Best case here is to use NSAttributedString\n if @switch_font.on? == true\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}-Bold\",size:@font_size)\n else\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}\",size:@font_size)\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def set_text(font, size)\n\t \t@pdf.setfont(font, size)\n\t end",
"def cell_name\n name.underscore.sub(/_cell/, '')\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def reset_font_settings; set_data_font(:category_label); end",
"def change_font_color(font_color='000000')\n validate_worksheet\n #if arg is a color name, convert to integer\n Color.validate_color(font_color)\n # Get copy of font object with modified color\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_font(font)\n end",
"def set_font\n font_families.update(\n 'HealthQuestPDF' => {\n normal: HealthQuest::Engine.root.join('lib', 'fonts', 'sourcesanspro-regular-webfont.ttf'),\n medium: HealthQuest::Engine.root.join('lib', 'fonts', 'sourcesanspro-bold-webfont.ttf'),\n bold: HealthQuest::Engine.root.join('lib', 'fonts', 'bitter-bold.ttf')\n }\n )\n font 'HealthQuestPDF'\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def reset_font_settings; set_data_font(:scene_label); end",
"def pickerView(pickerView, didSelectRow:row, inComponent:component)\n if @switch_font.on? == true\n case row\n when 0\n @font_label.font = UIFont.fontWithName(\"Baskerville-Bold\",size:@font_size)\n @font_label.text = \"Baskerville\"\n when 1\n @font_label.font = UIFont.fontWithName(\"HelveticaNeue-Bold\",size:@font_size)\n @font_label.text = \"HelveticaNeue\"\n when 2\n @font_label.font = UIFont.fontWithName(\"Copperplate-Bold\",size:@font_size)\n @font_label.text = \"Copperplate\"\n end\n else\n case row\n when 0\n @font_label.font = UIFont.fontWithName(\"Baskerville\",size:@font_size)\n @font_label.text = \"Baskerville\"\n when 1\n @font_label.font = UIFont.fontWithName(\"HelveticaNeue\",size:@font_size)\n @font_label.text = \"HelveticaNeue\"\n when 2\n @font_label.font = UIFont.fontWithName(\"Copperplate\",size:@font_size)\n @font_label.text = \"Copperplate\"\n end\n end\n end",
"def font_switch(c,change_type,arg)\n case change_type\n when Worksheet::NAME\n unless arg.is_a?String\n raise 'Not a String'\n end\n c.change_font_name(arg)\n when Worksheet::SIZE\n unless arg.is_a?(Integer) || arg.is_a?(Float)\n raise 'Not a Number'\n end\n c.change_font_size(arg)\n when Worksheet::COLOR\n Color.validate_color(arg)\n c.change_font_color(arg)\n when Worksheet::ITALICS\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_italics(arg)\n when Worksheet::BOLD\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_bold(arg)\n when Worksheet::UNDERLINE\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_underline(arg)\n when Worksheet::STRIKETHROUGH\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_strikethrough(arg)\n else\n raise 'Invalid change_type'\n end\n end",
"def font=(font_path)\n @title_font.path = font_path unless @title_font.path\n @marker_font.path = font_path\n @legend_font.path = font_path\n end",
"def draw_name\n set_data_font(:name)\n clear_and_draw_text(0, @draw_y, contents_width, line_height, @quest.name, 1)\n end",
"def set_fonts\n font_families.update(\"Arial\" => {\n :normal => \"#{Rails.root}/vendor/assets/fonts/Arial.ttf\",\n :bold => \"#{Rails.root}/vendor/assets/fonts/Arial-Bold.ttf\"\n })\n font \"Arial\"\n end",
"def cell_name\n self.name.underscore.sub(/_cell\\Z/, '')\n end",
"def font(name=\"Helvetica\", size=nil)\n fontsize(size) if size\n @fname = name\n fontsize unless @fsize\n CGContextSelectFont(@ctx, @fname, @fsize, KCGEncodingMacRoman)\n end",
"def change_font_size(font_size=10)\n validate_worksheet\n if font_size.is_a?(Integer) || font_size.is_a?(Float)\n # Get copy of font object with modified size\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_font(font)\n else\n raise 'Argument must be a number'\n end\n end",
"def modify_font(workbook, style_index)\n # xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(workbook.get_style(style_index))\n\n #modify fonts array\n font_id = xf[:fontId]\n font = workbook.fonts[font_id.to_s][:font]\n\n #else, just change the attribute itself, done in calling method.\n if workbook.fonts[font_id.to_s][:count] > 1 || font_id == 0\n old_size = workbook.fonts.size.to_s\n workbook.fonts[old_size] = {}\n workbook.fonts[old_size][:font] = deep_copy(font)\n workbook.fonts[old_size][:count] = 1\n workbook.fonts[font_id.to_s][:count] -= 1\n\n #modify styles array\n font_id = old_size\n\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fontId] = font_id\n xf[:applyFont] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1 #returns new style_index\n else\n return style_index\n end\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def pbSetSmallFont(bitmap)\n bitmap.font.name=pbSmallFontName\n bitmap.font.size=25\nend",
"def stylename=(new_stylename)\n @stylename = new_stylename\n restyle!\n end",
"def set_data_font(data_type)\n @maqj_default_font = contents.font.dup unless @maqj_default_font\n contents.font.name = QuestData::FONTNAMES[data_type] ? \n QuestData::FONTNAMES[data_type] : @maqj_default_font.name\n contents.font.size = QuestData::FONTSIZES[data_type] ? \n QuestData::FONTSIZES[data_type] : @maqj_default_font.size\n contents.font.bold = QuestData::FONTBOLDS.keys.include?(data_type) ? \n QuestData::FONTBOLDS[data_type] : @maqj_default_font.bold\n contents.font.italic = QuestData::FONTITALICS.keys.include?(data_type) ?\n QuestData::FONTITALICS[data_type] : @maqj_default_font.italic\n case data_type\n when :objectives then change_color(@maqj_objective_color) if @maqj_objective_color\n when :name then change_color(quest_name_colour(@quest)) if @quest\n else\n change_color(text_color(QuestData::COLOURS[data_type])) if QuestData::COLOURS.keys.include?(data_type)\n end\n end",
"def font_setup\n %Q(font \"#{@font},#{@fontsize}\")\n end",
"def name_cell(item)\n data = if used?(item.breakdown)\n data = [\n [{ content: item.name, padding: [5, 0, 0, 5] }],\n [{ content: item.breakdown, size: 8, padding: [2, 0, 5, 5] }]\n ]\n\n options = {\n cell_style: {\n borders: []\n },\n position: :left\n }\n\n @pdf.make_table(data, options)\n else\n item.name\n end\n end",
"def set_font(key, font)\r\n # Add :Font key to content hash unless there is no key\r\n unless @content[pn(:Resources)].has_key?(pn(:Font))\r\n @content[pn(:Resources)].update(pn(:Font) => pd)\r\n end\r\n # Add font symbol to :Font hash\r\n unless @content[pn(:Resources)][pn(:Font)].has_key?(key)\r\n @content[pn(:Resources)][pn(:Font)].update(key => font.reference)\r\n end\r\n end",
"def changeCell(cell, symbol)\n\t\t@field[cell].value = symbol\n\tend",
"def change_font_bold(bolded=false)\n validate_worksheet\n # Get copy of font object with modified bold settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_font(font)\n end",
"def change_font_italics(italicized=false)\n validate_worksheet\n # Get copy of font object with modified italics settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_font(font)\n end",
"def set_font_from_path(font, bold_font)\n font_name = Pathname.new(font).basename\n @pdf.font_families.update(\n \"#{font_name}\" => {\n normal: font,\n italic: font,\n bold: bold_font,\n bold_italic: bold_font\n }\n )\n @pdf.font(font_name)\n end",
"def mystdfmt(ws)\n return if !(ws.is_a?(WIN32OLE))\n ws.rows(1).Font.Bold=true\n ws.UsedRange.Font.size=8\n ws.columns.autofit\nend",
"def set_user_font model\n model.unread?(@user) ? 'font-bold' : '' rescue ''\n end",
"def maiil_reset_font\n fn = MA_IconItemList::NUM_FONTNAME ? MA_IconItemList::NUM_FONTNAME : Font.default_name\n contents.font = Font.new(fn, MA_IconItemList::NUM_FONTSIZE) # name and size\n contents.font.bold = MA_IconItemList::NUM_BOLD # Set Bold\n contents.font.italic = MA_IconItemList::NUM_ITALIC # Set Italic\n contents.font.shadow = MA_IconItemList::NUM_SHADOW # Set Shadow\n contents.font.outline = MA_IconItemList::NUM_OUTLINE # Set outline\n oc = MA_IconItemList::NUM_OUT_COLOUR ? MA_IconItemList::NUM_OUT_COLOUR : Font.default_out_color\n contents.font.out_color = text_color(oc) # outline color\n end",
"def SetFont(family, style='', size=0)\n\t\t# save previous values\n\t\t@prevfont_family = @font_family;\n\t\t@prevfont_style = @font_style;\n\n\t\tfamily=family.downcase;\n\t\tif (family=='')\n\t\t\tfamily=@font_family;\n\t\tend\n\t\tif ((!@is_unicode) and (family == 'arial'))\n\t\t\tfamily = 'helvetica';\n\t\telsif ((family==\"symbol\") or (family==\"zapfdingbats\"))\n\t\t\tstyle='';\n\t\tend\n\t\t\n\t\tstyle=style.upcase;\n\n\t\tif (style.include?('U'))\n\t\t\t@underline=true;\n\t\t\tstyle= style.gsub('U','');\n\t\telse\n\t\t\t@underline=false;\n\t\tend\n\t\tif (style.include?('D'))\n\t\t\t@deleted=true;\n\t\t\tstyle= style.gsub('D','');\n\t\telse\n\t\t\t@deleted=false;\n\t\tend\n\t\tif (style=='IB')\n\t\t\tstyle='BI';\n\t\tend\n\t\tif (size==0)\n\t\t\tsize=@font_size_pt;\n\t\tend\n\n\t\t# try to add font (if not already added)\n\t\tAddFont(family, style);\n\t\t\n\t\t#Test if font is already selected\n\t\tif ((@font_family == family) and (@font_style == style) and (@font_size_pt == size))\n\t\t\treturn;\n\t\tend\n\t\t\n\t\tfontkey = family + style;\n\t\tstyle = '' if (@fonts[fontkey].nil? and !@fonts[family].nil?)\n \n\t\t#Test if used for the first time\n\t\tif (@fonts[fontkey].nil?)\n\t\t\t#Check if one of the standard fonts\n\t\t\tif (!@core_fonts[fontkey].nil?)\n\t\t\t\tif @@fpdf_charwidths[fontkey].nil?\n\t\t\t\t\t#Load metric file\n\t\t\t\t\tfile = family;\n\t\t\t\t\tif ((family!='symbol') and (family!='zapfdingbats'))\n\t\t\t\t\t\tfile += style.downcase;\n\t\t\t\t\tend\n\t\t\t\t\tif (getfontpath(file + '.rb').nil?)\n\t\t\t\t\t\t# try to load the basic file without styles\n\t\t\t\t\t\tfile = family;\n\t\t\t\t\t\tfontkey = family;\n\t\t\t\t\tend\n\t\t\t\t\trequire(getfontpath(file + '.rb'));\n \t\tfont_desc = TCPDFFontDescriptor.font(file)\n\t\t\t\t\tif ((@is_unicode and ctg.nil?) or ((!@is_unicode) and (@@fpdf_charwidths[fontkey].nil?)) )\n\t\t\t\t\t\tError(\"Could not include font metric file [\" + fontkey + \"]: \" + getfontpath(file + \".rb\"));\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\ti = @fonts.length + 1;\n\n\t\t\t\tif (@is_unicode)\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type' => font_desc[:type], 'name' => font_desc[:name], 'desc' => font_desc[:desc], 'up' => font_desc[:up], 'ut' => font_desc[:ut], 'cw' => font_desc[:cw], 'enc' => font_desc[:enc], 'file' => font_desc[:file], 'ctg' => font_desc[:ctg]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\telse\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type'=>'core', 'name'=>@core_fonts[fontkey], 'up'=>-100, 'ut'=>50, 'cw' => font_desc[:cw]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tError('Undefined font: ' + family + ' ' + style);\n\t\t\tend\n\t\tend\n\t\t#Select it\n\t\t@font_family = family;\n\t\t@font_style = style;\n\t\t@font_size_pt = size;\n\t\t@font_size = size / @k;\n\t\t@current_font = @fonts[fontkey]; # was & may need deep copy?\n\t\tif (@page>0)\n\t\t\tout(sprintf('BT /F%d %.2f Tf ET', @current_font['i'], @font_size_pt));\n\t\tend\n\tend",
"def change_color(color)\n self.font.color = color\n end",
"def cell_name\n # XXX Why is this needed? Can there be cells which have a different\n # @cell_name from their class's name?\n @cell_name || self.class.cell_name\n end",
"def cell_name\n # XXX Why is this needed? Can there be cells which have a different\n # @cell_name from their class's name?\n @cell_name || self.class.cell_name\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * row.worksheet.workbook.bold_font_multiplier) : font.sz\n end",
"def font=(font)\n set_font(font)\n generate_buffers\n end",
"def info_font(*args)\n @p.info_font(self, *args)\n end",
"def fonts(workbook, _center, heading, _colors)\n worksheet = workbook.add_worksheet('Fonts')\n\n worksheet.set_column(0, 0, 30)\n worksheet.set_column(1, 1, 10)\n\n worksheet.write(0, 0, \"Font name\", heading)\n worksheet.write(0, 1, \"Font size\", heading)\n\n fonts = []\n fonts << [10, 'Arial']\n fonts << [12, 'Arial']\n fonts << [14, 'Arial']\n fonts << [12, 'Arial Black']\n fonts << [12, 'Arial Narrow']\n fonts << [12, 'Century Schoolbook']\n fonts << [12, 'Courier']\n fonts << [12, 'Courier New']\n fonts << [12, 'Garamond']\n fonts << [12, 'Impact']\n fonts << [12, 'Lucida Handwriting']\n fonts << [12, 'Times New Roman']\n fonts << [12, 'Symbol']\n fonts << [12, 'Wingdings']\n fonts << [12, 'A font that doesn\\'t exist']\n\n i = 0\n fonts.each do |font|\n format = workbook.add_format\n\n format.set_size(font[0])\n format.set_font(font[1])\n\n i += 1\n worksheet.write(i, 0, font[1], format)\n worksheet.write(i, 1, font[0], format)\n end\n end",
"def change_row_italics(row=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_row_font(row, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def font(height, font_name=Gosu::default_font_name)\n @used_fonts ||= {}\n key = [font_name, height]\n if @used_fonts.include? key\n @font = @used_fonts[key]\n else\n @font = @used_fonts[key] = Gosu::Font.new(self, font_name, height)\n end\n end",
"def postscriptfontname=(postscriptFontName)\n @elementHash[:postscriptfontname] = postscriptFontName\n @elementHash.delete(:userinterfacefont)\n end",
"def draw_actor_name(actor, x, y)\n self.contents.font.size = Scan_Window_Font_Size\n self.contents.font.color = normal_color\n self.contents.draw_text(x, y, 120, 32, actor.name)\n end",
"def setLabel ( position, font, spacing ) \n\t@sets << \"key spacing #{spacing}\"\n\t@sets << \"key font '#{font}'\"\n\t@sets << \"key #{position}\"\n end",
"def load_font(family, height)\n end"
] | [
"0.81314415",
"0.7896164",
"0.7712866",
"0.77062994",
"0.72959864",
"0.7255562",
"0.71670526",
"0.7110613",
"0.71081805",
"0.6693014",
"0.6693014",
"0.66816",
"0.66816",
"0.66166097",
"0.6615892",
"0.65713406",
"0.65297943",
"0.6436224",
"0.6427392",
"0.6379348",
"0.6362969",
"0.6332783",
"0.633258",
"0.631478",
"0.62676936",
"0.62676936",
"0.62529284",
"0.62442535",
"0.62442535",
"0.62335247",
"0.6229093",
"0.6229093",
"0.620988",
"0.6189007",
"0.6157918",
"0.61545813",
"0.6152438",
"0.60866827",
"0.60866827",
"0.6076135",
"0.60525113",
"0.60525113",
"0.6052292",
"0.60522044",
"0.60442615",
"0.6031154",
"0.602856",
"0.6017266",
"0.5995838",
"0.5995838",
"0.59502786",
"0.5932864",
"0.5923859",
"0.5892133",
"0.58882594",
"0.58846027",
"0.5842431",
"0.5831265",
"0.58278",
"0.5803306",
"0.5803306",
"0.5799147",
"0.5787964",
"0.57809573",
"0.57702583",
"0.5763675",
"0.57472545",
"0.573796",
"0.5735604",
"0.57145244",
"0.56933975",
"0.5686498",
"0.56837165",
"0.5679866",
"0.5661748",
"0.564066",
"0.5620232",
"0.55860454",
"0.55790305",
"0.5578644",
"0.55568445",
"0.5548914",
"0.5542116",
"0.5527431",
"0.5506317",
"0.5500295",
"0.54996574",
"0.54937905",
"0.54937905",
"0.54842436",
"0.5474626",
"0.54562885",
"0.54440916",
"0.5442556",
"0.54325897",
"0.54314566",
"0.5409591",
"0.54006463",
"0.53995985"
] | 0.8321192 | 1 |
Changes font size of cell | def change_font_size(font_size = 10)
validate_worksheet
raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)
font = get_cell_font.dup
font.set_size(font_size)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def change_font_size(font_size=10)\n validate_worksheet\n if font_size.is_a?(Integer) || font_size.is_a?(Float)\n # Get copy of font object with modified size\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_font(font)\n else\n raise 'Argument must be a number'\n end\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * row.worksheet.workbook.bold_font_multiplier) : font.sz\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * 1.5) : font.sz\n end",
"def font_size\r\n @style.font_size || @default_font_size\r\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def SetFontSize(size)\n\t\t#Set font size in points\n\t\tif (@font_size_pt== size)\n\t\t\treturn;\n\t\tend\n\t\t@font_size_pt = size;\n\t\t@font_size = size.to_f / @k;\n\t\tif (@page > 0)\n\t\t\tout(sprintf('BT /F%d %.2f Tf ET', @current_font['i'], @font_size_pt));\n\t\tend\n\tend",
"def set_font_size size\n @font = @font_cache[size]\n return if (@font)\n @font = Gosu::Font.new(\n size,\n name: @settings.get(:font_name)\n )\n @font_cache[size] = @font\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def text_size s, f = font\n f.text_size s\n end",
"def text_size s, f = font\n f.text_size s\n end",
"def title_font_size=(value)\n @title_font.size = value\n end",
"def fontsize=(fontSize)\n @elementHash[:fontsize] = fontSize.to_f\n end",
"def set_text(font, size)\n\t \t@pdf.setfont(font, size)\n\t end",
"def mystdfmt(ws)\n return if !(ws.is_a?(WIN32OLE))\n ws.rows(1).Font.Bold=true\n ws.UsedRange.Font.size=8\n ws.columns.autofit\nend",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def font_size(size=nil)\n cur_page.font_size(size)\n end",
"def update_font\n self.contents.font.name = @window.fontName\n #self.recalculate_maxlength\n self.refresh\n end",
"def scale_fontsize(value)\n value * @scale\n end",
"def coordinate_labels_font_size\n if self.options[:font_size].blank?\n 10\n else\n self.options[:font_size]\n end\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def legend_font_size=(value)\n @legend_font.size = value\n end",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def marker_font_size=(value)\n @marker_font.size = value\n end",
"def font_size()\n validate_worksheet\n return @workbook.fonts[font_id()][:font][:sz][:attributes][:val]\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def widthcell\n 10\n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def set_font(face, size)\n @curr_font = Gauges::FontRef.get(face, size)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def font_descender(size = nil)\n size = @font_size if size.nil? or size <= 0\n\n select_font(\"Helvetica\") if @fonts.empty?\n hi = @fonts[@current_font].fontbbox[1].to_f\n (size * hi / 1000.0)\n end",
"def string_width(string, font_size)\n font_scale = font_size / row.worksheet.workbook.font_scale_divisor\n (string.to_s.size + 3) * font_scale\n end",
"def font_size\n styles['font-size'] ? styles['font-size'].to_f : DEFAULT_FONT_SIZE\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def adjusts_font_size_to_fit_width ; view.adjustsFontSizeToFitWidth ; end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def font_size=(font_size)\n if !font_size.nil? && font_size < 0\n fail ArgumentError, 'invalid value for \"font_size\", must be greater than or equal to 0.'\n end\n\n @font_size = font_size\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def width\n @font.text_width(self.text)\n end",
"def border_width=(width)\n self.each {|cell| cell.border_width = width}\n end",
"def configure_cell_attributes(workbook_cell)\n workbook_cell.format = template.formats[@cell.xpath(\"@table:style-name\").to_s]\n workbook_cell.colspan = @cell.xpath(\"@table:number-columns-spanned\").to_s\n workbook_cell.rowspan = @cell.xpath(\"@table:number-rows-spanned\").to_s\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def text_width(font_handle, text)\n end",
"def font_size\n size_tag = @styles.xpath('//w:docDefaults//w:rPrDefault//w:rPr//w:sz').first\n size_tag ? size_tag.attributes['val'].value.to_i / 2 : nil\n end",
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def fontsize(points=20)\n @fsize = points\n font unless @fname\n #CGContextSetFontSize(@ctx,points)\n CGContextSelectFont(@ctx, @fname, @fsize, KCGEncodingMacRoman)\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def font_size\n return nil unless @styles\n\n size_tag = @styles.xpath('//w:docDefaults//w:rPrDefault//w:rPr//w:sz').first\n size_tag ? size_tag.attributes['val'].value.to_i / 2 : nil\n end",
"def font_height(size = nil)\n size = @font_size if size.nil? or size <= 0\n\n select_font(\"Helvetica\") if @fonts.empty?\n hh = @fonts[@current_font].fontbbox[3].to_f - @fonts[@current_font].fontbbox[1].to_f\n (size * hh / 1000.0)\n end",
"def set_font_sizes(font_sizes:)\n {\n method: \"Page.setFontSizes\",\n params: { fontSizes: font_sizes }.compact\n }\n end",
"def change_font(font)\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, font_id())\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id())\n xf[:fontId] = Integer(font_id.to_i)\n # Modify xf array and retrieve new xf id\n @style_index = modify_xf(@workbook, xf)\n end",
"def font(row, col, sheet = nil)\n sheet ||= default_sheet\n read_cells(sheet)\n row, col = normalize(row, col)\n style_name = @style[sheet][[row, col]] || @style_defaults[sheet][col - 1] || 'Default'\n @font_style_definitions[style_name]\n end",
"def size_col(col) #:nodoc:\n # Look up the cell value to see if it has been changed\n unless @col_sizes[col].nil?\n width = @col_sizes[col]\n\n # The relationship is different for user units less than 1.\n if width < 1\n return (width *12).to_i\n else\n return (width *7 +5 ).to_i\n end\n else\n return 64\n end\n end",
"def font=(value)\n @font = value\n end",
"def stringwidth(text, fontsize)\n @p.stringwidth(text, self, fontsize)\n end",
"def setCellWidth(cellContent, width)\n numCharsTooShort = width - cellContent.length\n if numCharsTooShort < 0\n # cellContent is too long, so truncate\n return cellContent[0, [width - 3, 0].max] + '.' * [3, width].min\n else\n # cellContent is requested width or too short, so right-pad with zero or more spaces\n return cellContent + ' ' * numCharsTooShort\n end\n end",
"def height\n @font.height\n end",
"def shrink\n column_size = table.columns_size\n ratio = ((natural_width - renderer.width) / column_size.to_f).ceil\n\n widths = (0...column_size).reduce([]) do |lengths, col|\n width = (renderer.column_widths[col] - ratio)\n # basically ruby 2.4 Numeric#clamp\n width = width < minimum_width ? minimum_width : width\n width = width > renderer.width ? renderer.width : width\n lengths << width\n end\n distribute_extra_width(widths)\n end",
"def font=(font)\n @label.font = font\n end",
"def pbSetSmallFont(bitmap)\n bitmap.font.name=pbSmallFontName\n bitmap.font.size=25\nend",
"def shrink_to_fit(text)\n wrap(text)\n until @everything_printed || @font_size <= @min_font_size\n @font_size = [@font_size - 0.3, @min_font_size].max\n @document.font_size = @font_size\n wrap(text)\n end\n end",
"def text_line_width(text, size = nil)\n if text.kind_of?(Numeric) and size.kind_of?(String)\n text, size = size, text\n warn PDF::Writer::Lang[:text_width_parameters_reversed] % caller[0]\n end\n\n if size.nil? or size <= 0\n size = @font_size\n end\n\n # This function should not change any of the settings, though it will\n # need to track any tag which change during calculation, so copy them\n # at the start and put them back at the end.\n t_CTS = @current_text_state.dup\n\n select_font(\"Helvetica\") if @fonts.empty?\n # converts a number or a float to a string so it can get the width\n tt = text.to_s\n # hmm, this is where it all starts to get tricky - use the font\n # information to calculate the width of each character, add them up\n # and convert to user units\n width = 0\n font = @current_font\n\n pos = -1\n loop do\n pos += 1\n break if pos == tt.size\n font_change = true\n tag_size, text, font_change = quick_text_tags(text, pos, font_change)\n if tag_size != 0\n if font_change\n current_font!\n font = @current_font\n end\n pos += tag_size - 1\n else\n if \"<\" == tt[pos, 4]\n width += char_width(font, '<')\n pos += 3\n elsif \">\" == tt[pos, 4]\n width += char_width(font, '>')\n pos += 3\n elsif \"&\" == tt[pos, 5]\n width += char_width(font, '&')\n pos += 4\n else\n width += char_width(font, tt[pos, 1])\n end\n end\n end\n\n @current_text_state = t_CTS.dup\n current_font!\n\n (width * size / 1000.0)\n end",
"def width\n #@font.text_width(self.text)\n return 200\n end",
"def change_column_width(column_index, width_in_chars = RubyXL::ColumnRange::DEFAULT_WIDTH)\n change_column_width_raw(column_index, ((width_in_chars + (5.0 / RubyXL::Font::MAX_DIGIT_WIDTH)) * 256).to_i / 256.0)\n end",
"def table_size=(size)\n @cc.table_size = size\n end",
"def table_size=(size)\n @cc.table_size = size\n end",
"def width\n @font.text_width(self.text)\n end",
"def modify_font(workbook, style_index)\n # xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(workbook.get_style(style_index))\n\n #modify fonts array\n font_id = xf[:fontId]\n font = workbook.fonts[font_id.to_s][:font]\n\n #else, just change the attribute itself, done in calling method.\n if workbook.fonts[font_id.to_s][:count] > 1 || font_id == 0\n old_size = workbook.fonts.size.to_s\n workbook.fonts[old_size] = {}\n workbook.fonts[old_size][:font] = deep_copy(font)\n workbook.fonts[old_size][:count] = 1\n workbook.fonts[font_id.to_s][:count] -= 1\n\n #modify styles array\n font_id = old_size\n\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fontId] = font_id\n xf[:applyFont] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1 #returns new style_index\n else\n return style_index\n end\n end",
"def table_question_style\n {\n column_widths: { 0 => 460 },\n cell_style: {\n border_width: 0,\n size: 12,\n align: :left,\n font_style: :bold,\n padding: [0, 0, 0, 45]\n },\n header: true\n }\n end",
"def draw_table_six_columns(table_info, width_columns = [100, 100, 100, 90, 90, 40])\n table (table_info) do\n columns(0..5).border_width = 1\n columns(0..5).size = 7\n self.column_widths = width_columns\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \"\"\n \n end",
"def reset_table_cell\n mixin({\n text_align: 'left',\n font_weight: 'normal',\n vertical_align: 'middle'\n })\n end",
"def title_size=(v)\n @title.text_size = v unless v.to_s.empty?\n end",
"def font_switch(c,change_type,arg)\n case change_type\n when Worksheet::NAME\n unless arg.is_a?String\n raise 'Not a String'\n end\n c.change_font_name(arg)\n when Worksheet::SIZE\n unless arg.is_a?(Integer) || arg.is_a?(Float)\n raise 'Not a Number'\n end\n c.change_font_size(arg)\n when Worksheet::COLOR\n Color.validate_color(arg)\n c.change_font_color(arg)\n when Worksheet::ITALICS\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_italics(arg)\n when Worksheet::BOLD\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_bold(arg)\n when Worksheet::UNDERLINE\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_underline(arg)\n when Worksheet::STRIKETHROUGH\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_strikethrough(arg)\n else\n raise 'Invalid change_type'\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def change_font_name(font_name='Verdana')\n validate_worksheet\n # Get copy of font object with modified name\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_font(font)\n end",
"def italic_cell(options = {}, &block)\n cell({ font_style: :italic }.merge(options || {}), &block)\n end"
] | [
"0.77199495",
"0.7712393",
"0.7611653",
"0.71374416",
"0.6978516",
"0.692586",
"0.6728187",
"0.66601753",
"0.66556317",
"0.6619674",
"0.6533546",
"0.6522609",
"0.6501379",
"0.64703393",
"0.64372236",
"0.64372236",
"0.6417534",
"0.6395196",
"0.63928103",
"0.6318056",
"0.6261534",
"0.6261534",
"0.62583894",
"0.6257958",
"0.6203692",
"0.61963093",
"0.61930364",
"0.6180755",
"0.6138897",
"0.6097482",
"0.60168535",
"0.6014929",
"0.5958263",
"0.5953417",
"0.5953417",
"0.5935003",
"0.5926239",
"0.5911609",
"0.5911609",
"0.59033185",
"0.5886229",
"0.5870288",
"0.58594954",
"0.58594954",
"0.5811717",
"0.58059675",
"0.58052444",
"0.57569665",
"0.5726804",
"0.5715882",
"0.5712908",
"0.5712908",
"0.56469643",
"0.56469643",
"0.55837214",
"0.5552504",
"0.5532719",
"0.5509251",
"0.55013317",
"0.5501064",
"0.5501064",
"0.5473179",
"0.5469995",
"0.5463156",
"0.544655",
"0.5438885",
"0.5424099",
"0.5393868",
"0.53864694",
"0.5383967",
"0.53564763",
"0.5356279",
"0.5321718",
"0.531514",
"0.5310223",
"0.5292902",
"0.5286555",
"0.525576",
"0.52331257",
"0.52266836",
"0.5218274",
"0.52103996",
"0.5209626",
"0.51957774",
"0.518029",
"0.51714194",
"0.51714194",
"0.51700324",
"0.5166933",
"0.5152656",
"0.51398915",
"0.5138928",
"0.5135754",
"0.5130375",
"0.51166993",
"0.5107312",
"0.5107312",
"0.51071316",
"0.51016176"
] | 0.7972899 | 1 |
Changes font color of cell | def change_font_color(font_color = '000000')
validate_worksheet
Color.validate_color(font_color)
font = get_cell_font.dup
font.set_rgb_color(font_color)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def change_text_color(color)\n @text_color = color\n @text_entry.refresh\n self.redraw\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def change_color(color)\n self.font.color = color\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def font_color()\n validate_worksheet\n if @workbook.fonts[font_id()][:font][:color].nil?\n '000000' #black\n else\n @workbook.fonts[font_id()][:font][:color][:attributes][:rgb]\n end\n end",
"def change_font_color(font_color='000000')\n validate_worksheet\n #if arg is a color name, convert to integer\n Color.validate_color(font_color)\n # Get copy of font object with modified color\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_font(font)\n end",
"def ansi_formatting(cell, col, row); end",
"def after_cell_render_styling(column_name,cell_value,record)\n \"\"\n \n end",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def set_rgb_color(font_color)\n self.color = RubyXL::Color.new(:rgb => font_color.to_s)\n end",
"def font_color=(value)\n @title_font.color = value\n @marker_font.color = value\n @legend_font.color = value\n end",
"def textColor=(color)\n @label.textColor = color\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def before_cell_render_styling(column_name,cell_value,record)\n \"\"\n end",
"def coordinate_labels_font_color\n if self.options[:font_color].blank?\n '#333333'\n else\n self.options[:font_color]\n end\n end",
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def font(row, col, sheet = nil)\n sheet ||= default_sheet\n read_cells(sheet)\n row, col = normalize(row, col)\n style_name = @style[sheet][[row, col]] || @style_defaults[sheet][col - 1] || 'Default'\n @font_style_definitions[style_name]\n end",
"def color(val)\n raise 'Please provide a valid cell color' unless ('A'..'Z').include?(val)\n self.value = val\n end",
"def font_color(color=nil)\n cur_page.font_color(color)\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def SetTextColor(r, g=-1, b=-1, storeprev=false)\n\t\t#Set color for text\n\t\tif ((r==0 and :g==0 and :b==0) or :g==-1)\n\t\t\t@text_color=sprintf('%.3f g', r/255.0);\n\t\telse\n\t\t\t@text_color=sprintf('%.3f %.3f %.3f rg', r/255.0, g/255.0, b/255.0);\n\t\tend\n\t\t@color_flag=(@fill_color!=@text_color);\n\t\tif (storeprev)\n\t\t\t# store color as previous value\n\t\t\t@prevtext_color = [r, g, b]\n\t\tend\n\tend",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def highlight_cell(tbl, row, col, id, check: false)\n bg_color = '&#ade6dd' unless check\n\n tbl[row + 1][col + 1] = { content: id,\n check: check,\n class: 'td-full-slot',\n style: {'background-color'=> bg_color}}\n end",
"def shading_colour=(colour)\n self.each {|cell| cell.shading_colour = colour}\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def textColor\n return @text_color\n end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def cell_renderer(*val)\n if val.empty?\n @cell_renderer ||= create_default_cell_renderer\n else\n @cell_renderer = val[0] \n end\n end",
"def cell_renderer(*val)\n if val.empty?\n @cell_renderer ||= create_default_cell_renderer\n else\n @cell_renderer = val[0] \n end\n end",
"def TreeView_SetTextColor(hwnd, clr) send_treeview_message(hwnd, :SETTEXTCOLOR, lparam: clr) end",
"def change_font_color\n #Code essentially used from Clay Allsop's RubyMotion: iOS Development with Ruby\n #with a few tweeks\n #\n #Get the color_field text\n color_prefix = @color_field.text\n #Downcase the color_field text and append \"Color\"\n color_method = \"#{color_prefix.downcase}Color\"\n #If the color exists, create as UIColor.colorColor\n if UIColor.respond_to?(color_method)\n @font_color = UIColor.send(color_method)\n @font_label.textColor = @font_color\n #For fun I added that the text color also becomes the toggle background color when ON\n @switch_font.onTintColor = @font_color\n @switch_font.tintColor = @font_color\n else\n #If the color is not known, display an alert to inform user\n UIAlertView.alloc.initWithTitle(\"Invalid Color\",\n message: \"#{color_prefix.capitalize} is not a usable color at this time\",\n delegate: nil,\n cancelButtonTitle: \"OK\",\n otherButtonTitles: nil).show\n end\n end",
"def settextcolorind(*)\n super\n end",
"def repaint graphic, r=@row,c=@col, row_index=-1,value=@text, focussed=false, selected=false\n\n select_colors focussed, selected \n\n value=value.to_s\n if !@display_length.nil?\n if value.length > @display_length\n value = value[0..@display_length-1]\n end\n end\n len = @display_length || value.length\n graphic.printstring r, c, \"%-*s\" % [len, value], @color_pair, @attr\n end",
"def set_text_color_a(color = RFPDF::COLOR_PALETTE[:black], colorspace = :rgb)\n if colorspace == :cmyk\n SetCmykTextColor(color[0], color[1], color[2], color[3])\n else\n SetTextColor(color[0], color[1], color[2])\n end\n end",
"def color(text, color)\n if COLORS[color]\n \"#{start_color color}#{text}#{reset_color}\"\n end\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def bold_red(output)\n color('1;31', output)\n end",
"def draw_text_cs(x, y, w, h, t, a = 0)\r\n original_color = self.font.color.clone\r\n self.font.color = Color.new(0,0,0,100)\r\n draw_text(x + 2, y + 2, w, h, t, a)\r\n self.font.color = original_color\r\n draw_text(x, y, w, h, t, a) \r\n end",
"def ctrlSetTextColor _obj, _args\n \"_obj ctrlSetTextColor _args;\" \n end",
"def color(text, color, bold = false)\n bold = bold ? BOLD : \"\"\n \"#{bold}#{color}#{text}#{CLEAR}\"\n end",
"def init_colors\n $desc_color = \"#{GREEN}\" # color of description portion\n # color the title based on priority\n $p5color = \"#{BLUE}#{BOLD}\" \n $p4color = \"#{MAGENTA}\" \n $p3color = \"#{CYAN}#{BOLD}\" \n $p2color = \"#{BOLD}\"\n $p1color = \"#{YELLOW}#{ON_RED}\"\n #\n # color for only the type column\n $bugcolor = \"#{BLACK}#{ON_RED}\"\n $enhcolor = \"#{GREEN}\"\n $feacolor = \"#{CYAN}\"\n\n # color for row of started event\n $startedcolor = \"#{STANDOUT}\"\n\n cols = %x[tput colors] rescue 8\n cols = cols.to_i\n if cols >= 256\n $desc_color = \"\\x1b[38;5;236m\" # 256 colors, grey\n $p5color = \"\\x1b[38;5;57m\" # some kinda blue\n $p4color = \"\\x1b[38;5;239m\" # grey. 256 colors\n $p3color = \"\\x1b[38;5;244m\" # grey, 256 colors\n end\n end",
"def crisis_color\n return text_color(17)\n end",
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def color(text)\n \"\\e[31m#{text}\\e[0m\"\n end",
"def red(text)\n colorize text, \"\\033[1;31m\"\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def bolden(issue, row, color=:bold)\n bld = issue[@bolden_tag]\n bld = @custom_tags[@bolden_tag] if bld.nil?\n bld = bld.call(issue.dup) if bld.kind_of? Proc\n # ? truthy other than Ruby default?\n return row unless bld\n if row.kind_of? Array then\n row.map{|r| HighLine.color(r, color)}\n elsif row.kind_of? Hash then\n hsh={}\n row.each_pair{|k,v| hsh[k] = HighLine.color(v, color)}\n else\n HighLine.color(row.to_s, color)\n end\n end",
"def scr_bold\n print \"\\33[1m\"\nend",
"def green(text)\n colorize text, \"\\033[1;32m\"\n end",
"def style=(style)\n cells.each_with_index do | cell, index |\n s = style.is_a?(Array) ? style[index] : style\n cell.style = s\n end\n end",
"def output_color(text, color=text.to_i)\r\n # Color matches: 1 - Black; 2 - White; 3 - Red; 4 - Yellow; 5 - Green; 6 - Blue; 7 - Gold\r\n colors = { 1 => 30, 2 => 36, 3 => 31, 4 => 33, 5 => 35, 6 => 34, 7 => 220 }\r\n # \\e[47m Is for the grey foreground \\e[{color} is for picking the color and \\e[0m is for resetting the terminal.\r\n \"\\e[1m\\e[47m\\e[#{colors[color]}m#{text}\\e[0m\\e[22m\"\r\n end",
"def colorize(text, color)\n\t\"\\e[#{Colors[color]}m#{text}\\e[0m\"\nend",
"def text_colors\n @colors = {\n red: 31,\n yellow: 33,\n green: 32\n }\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def header_text_color\n block_for('header_text_color', '#dcdcdc')\n end",
"def colorize(color, text)\n \"\\e[#{color}m#{text}\\e[0m\"\n end",
"def highlight(row,scol,ecol)\n\t\t# only do rows that are on the screen\n\t\tif row < @linefeed then return end\n\t\tif row > (@linefeed + $screen.rows - 2) then return end\n\n\t\tif @text[row].length < 1 then return end\n\n\t\t# convert pos in text to pos on screen\n\t\tsc = bc2sc(row,scol)\n\t\tec = bc2sc(row,ecol)\n\n\t\t# replace tabs with spaces\n\t\tsline = tabs2spaces(@text[row])\n\t\t# get just string of interest\n\t\tif sc < @colfeed then sc = @colfeed end\n\t\tif ec < @colfeed then return end\n\t\tstr = sline[sc..ec]\n\t\tif ec == sline.length then str += \" \" end\n\t\tssc = sc - @colfeed\n\t\tsec = ec - @colfeed\n\n\t\tif (str.length+ssc) >= $screen.cols\n\t\t\tstr = str[0,($screen.cols-ssc)]\n\t\tend\n\n\t\t$screen.text_reverse(true)\n\t\t$screen.write_str((row-@linefeed+1),ssc,str)\n\t\t$screen.text_reverse(false)\n\tend",
"def color_table\n [0, 1, 4, 5, 7].each do |attr|\n puts '----------------------------------------------------------------'\n puts \"ESC[#{attr};Foreground;Background\"\n 30.upto(37) do |fg|\n 40.upto(47) do |bg|\n print \"\\033[#{attr};#{fg};#{bg}m #{fg};#{bg} \"\n end\n puts \"\\033[0m\"\n end\n end\n end",
"def SetCmykTextColor(c, m, y, k, storeprev=false)\n\t\t#Set color for text\n\t\t@text_color=sprintf('%.3f %.3f %.3f %.3f k', c, m, y, k);\n\t\t@color_flag=(@fill_color!=@text_color);\n\t\tif (storeprev)\n\t\t\t# store color as previous value\n\t\t\t@prevtext_color = [c, m, y, k]\n\t\tend\n\tend",
"def ListView_SetTextColor(hwnd, clrText) send_listview_message(hwnd, :SETTEXTCOLOR, lparam: clrText) end",
"def color(text, color_code)\n \"#{color_code}#{text}\\e[0m\"\n end",
"def colored_char\n color(char)\n end",
"def foreground_color(index)\n \"\\e[38;5;#{index}m\"\nend",
"def font(row, col, sheet = nil)\n sheet ||= @default_sheet\n read_cells(sheet)\n\n @fonts[sheet][normalize(row,col).to_a]\n end",
"def cell_style(models, options={})\n [models].flatten.compact.each do |m|\n m.apply_styles(options)\n end \n end",
"def blue = \"\\e[36m#{self}\\e[0m\"",
"def colorize(text, color_code); \"\\e[#{color_code}m#{text}\\e[0m\"; end",
"def color=(color)\n each_with_index do |cell, index|\n cell.color = color.is_a?(Array) ? color[index] : color\n end\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def set_cell(x, y, color)\n get_cell(x, y).color = color\n @last_cell_played = get_cell(x, y)\n end",
"def render pad, lineno, text\n if @hash\n dim = match_line text\n fg = dim.first\n bg = dim[1] || @default_colors[1]\n if dim.size == 3\n att = dim.last\n else\n att = @default_colors.last\n end\n cp = get_color($datacolor, fg, bg)\n else\n cp = @pair\n att = @default_colors[2]\n end\n\n FFI::NCurses.wattron(pad,FFI::NCurses.COLOR_PAIR(cp) | att)\n FFI::NCurses.mvwaddstr(pad, lineno, 0, text)\n FFI::NCurses.wattroff(pad,FFI::NCurses.COLOR_PAIR(cp) | att)\n end",
"def colortable\n names = %w(black red green yellow blue pink cyan white default)\n fgcodes = (30..39).to_a - [38]\n\n s = ''\n reg = \"\\e[%d;%dm%s\\e[0m\"\n bold = \"\\e[1;%d;%dm%s\\e[0m\"\n puts ' color table with these background codes:'\n puts ' 40 41 42 43 44 45 46 47 49'\n names.zip(fgcodes).each {|name,fg|\n s = \"#{fg}\"\n puts \"%7s \"%name + \"#{reg} #{bold} \"*9 % [fg,40,s,fg,40,s, fg,41,s,fg,41,s, fg,42,s,fg,42,s, fg,43,s,fg,43,s, \n fg,44,s,fg,44,s, fg,45,s,fg,45,s, fg,46,s,fg,46,s, fg,47,s,fg,47,s, fg,49,s,fg,49,s ]\n }\nend",
"def ListView_SetTextBkColor(hwnd, clrTextBk) send_listview_message(hwnd, :SETTEXTBKCOLOR, lparam: clrTextBk) end",
"def highlightCell\n disptype = @colvalues[@col]\n highlight = @highlight\n infolen = @info[@row][@col].size\n \n # Given the dominance of the color/attributes, we need to set the\n # current cell attribute.\n if @dominant == CDK::ROW\n highlight = (@rowtitle[@crow][0] || 0) & Ncurses::A_ATTRIBUTES\n elsif @dominant == CDK::COL\n highlight = (@coltitle[@ccol][0] || 0) & Ncurses::A_ATTRIBUTES\n end\n\n # If the column is only one char.\n (1..@colwidths[@ccol]).each do |x|\n ch = if x <= infolen && !Display.isHiddenDisplayType(disptype)\n then CDK.CharOf(@info[@row][@col][x - 1])\n else @filler\n end\n self.CurMatrixCell.mvwaddch(1, x, ch.ord | highlight)\n end\n self.CurMatrixCell.wmove(1, infolen + 1)\n self.CurMatrixCell.wrefresh\n end",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def red(string)\n \"\\033[0;33m#{string}\\033[0m\"\nend",
"def highlight text\n color_code = 7\n \"\\e[#{color_code}m#{text}\\e[0m\"\nend",
"def get_col_style(col)\n i = get_cols_index(col)\n if @cols[i].nil?\n @workbook.fonts['0'][:count] += 1\n return 0\n else\n return Integer(@cols[i][:attributes][:style])\n end\n end",
"def red(str)\n \"\\e[31m#{str}\\e[0m\"\nend"
] | [
"0.7460085",
"0.72489184",
"0.72067857",
"0.7135468",
"0.71241385",
"0.7105533",
"0.70734954",
"0.70734954",
"0.7011126",
"0.70002514",
"0.68595064",
"0.68219143",
"0.68219143",
"0.68104655",
"0.6794587",
"0.67466515",
"0.67338556",
"0.66503805",
"0.66452426",
"0.64654624",
"0.6400689",
"0.635936",
"0.6356986",
"0.6312874",
"0.62565696",
"0.6216346",
"0.62140554",
"0.61420035",
"0.61405295",
"0.6082263",
"0.6075267",
"0.6074587",
"0.606414",
"0.6006547",
"0.5993914",
"0.5993914",
"0.598763",
"0.59789187",
"0.5968625",
"0.5968625",
"0.59403294",
"0.59403294",
"0.5926472",
"0.59203905",
"0.58399445",
"0.58399445",
"0.5831051",
"0.58185863",
"0.5813238",
"0.58120424",
"0.578145",
"0.57731104",
"0.57647586",
"0.5742725",
"0.5742725",
"0.5707013",
"0.56805235",
"0.5671183",
"0.5663868",
"0.5661497",
"0.5655003",
"0.5650949",
"0.56489986",
"0.5648572",
"0.56307596",
"0.560001",
"0.5598657",
"0.55911463",
"0.5590822",
"0.5575326",
"0.5553519",
"0.55513996",
"0.5546417",
"0.5545773",
"0.5538997",
"0.5537136",
"0.553266",
"0.55322075",
"0.552534",
"0.5517698",
"0.5514443",
"0.5502855",
"0.54914063",
"0.5489927",
"0.54890805",
"0.5488869",
"0.547519",
"0.5467904",
"0.5467904",
"0.5458002",
"0.5447075",
"0.54456",
"0.5433224",
"0.5426965",
"0.5425562",
"0.5425562",
"0.5420126",
"0.5418047",
"0.54072803"
] | 0.769588 | 1 |
Changes font italics settings of cell | def change_font_italics(italicized = false)
validate_worksheet
font = get_cell_font.dup
font.set_italic(italicized)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_font_italics(italicized=false)\n validate_worksheet\n # Get copy of font object with modified italics settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_font(font)\n end",
"def change_row_italics(row=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_row_font(row, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def change_column_italics(col=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_column_font(col, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def change_font_decor\n #If toggled to on, font is bold, otherwise not bold.\n # Best case here is to use NSAttributedString\n if @switch_font.on? == true\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}-Bold\",size:@font_size)\n else\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}\",size:@font_size)\n end\n end",
"def italic_cell(options = {}, &block)\n cell({ font_style: :italic }.merge(options || {}), &block)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def bold_italic_cell(options = {}, &block)\n cell({ font_style: :bold_italic }.merge(options || {}), &block)\n end",
"def settextfontprec(*)\n super\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def maiil_reset_font\n fn = MA_IconItemList::NUM_FONTNAME ? MA_IconItemList::NUM_FONTNAME : Font.default_name\n contents.font = Font.new(fn, MA_IconItemList::NUM_FONTSIZE) # name and size\n contents.font.bold = MA_IconItemList::NUM_BOLD # Set Bold\n contents.font.italic = MA_IconItemList::NUM_ITALIC # Set Italic\n contents.font.shadow = MA_IconItemList::NUM_SHADOW # Set Shadow\n contents.font.outline = MA_IconItemList::NUM_OUTLINE # Set outline\n oc = MA_IconItemList::NUM_OUT_COLOUR ? MA_IconItemList::NUM_OUT_COLOUR : Font.default_out_color\n contents.font.out_color = text_color(oc) # outline color\n end",
"def italic; end",
"def italic; end",
"def font(row, col, sheet = nil)\n sheet ||= default_sheet\n read_cells(sheet)\n row, col = normalize(row, col)\n style_name = @style[sheet][[row, col]] || @style_defaults[sheet][col - 1] || 'Default'\n @font_style_definitions[style_name]\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def set_text(font, size)\n\t \t@pdf.setfont(font, size)\n\t end",
"def set_font_bitmap(bitmap)\n bitmap.font.bold = false\n bitmap.font.italic = false\n bitmap.font.color = Color::WHITE\n end",
"def set_font_bitmap(bitmap)\n bitmap.font.bold = false\n bitmap.font.italic = false\n bitmap.font.color = Color::WHITE\n end",
"def mystdfmt(ws)\n return if !(ws.is_a?(WIN32OLE))\n ws.rows(1).Font.Bold=true\n ws.UsedRange.Font.size=8\n ws.columns.autofit\nend",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def set_user_font model\n model.unread?(@user) ? 'font-bold' : '' rescue ''\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def font=(value)\n @font = value\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def is_italicized()\n validate_worksheet\n if @workbook.fonts[font_id()][:font][:i].nil?\n false\n else\n true\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def reset_font_settings; set_data_font(:category_label); end",
"def reset_font_settings(*args, &block)\n super(*args, &block)\n set_data_font(@maqj_font_data_type) if @maqj_font_data_type\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def setfont(*args)\n @p.setfont(self, *args)\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def set_data_font(data_type)\n @maqj_default_font = contents.font.dup unless @maqj_default_font\n contents.font.name = QuestData::FONTNAMES[data_type] ? \n QuestData::FONTNAMES[data_type] : @maqj_default_font.name\n contents.font.size = QuestData::FONTSIZES[data_type] ? \n QuestData::FONTSIZES[data_type] : @maqj_default_font.size\n contents.font.bold = QuestData::FONTBOLDS.keys.include?(data_type) ? \n QuestData::FONTBOLDS[data_type] : @maqj_default_font.bold\n contents.font.italic = QuestData::FONTITALICS.keys.include?(data_type) ?\n QuestData::FONTITALICS[data_type] : @maqj_default_font.italic\n case data_type\n when :objectives then change_color(@maqj_objective_color) if @maqj_objective_color\n when :name then change_color(quest_name_colour(@quest)) if @quest\n else\n change_color(text_color(QuestData::COLOURS[data_type])) if QuestData::COLOURS.keys.include?(data_type)\n end\n end",
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def font(row, col, sheet = nil)\n sheet ||= @default_sheet\n read_cells(sheet)\n\n @fonts[sheet][normalize(row,col).to_a]\n end",
"def change_font(font)\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, font_id())\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id())\n xf[:fontId] = Integer(font_id.to_i)\n # Modify xf array and retrieve new xf id\n @style_index = modify_xf(@workbook, xf)\n end",
"def configure_cell_attributes(workbook_cell)\n workbook_cell.format = template.formats[@cell.xpath(\"@table:style-name\").to_s]\n workbook_cell.colspan = @cell.xpath(\"@table:number-columns-spanned\").to_s\n workbook_cell.rowspan = @cell.xpath(\"@table:number-rows-spanned\").to_s\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def semanticize_font_styles!\n @document.tree.css('span').each do |node|\n if node.bold?\n node.node_name = 'strong'\n elsif node.italic?\n node.node_name = 'em'\n end\n end\n end",
"def bold_italic\n @font.fullname = 'MS-Mincho.BoldItalic'\n @desc.flags = flag_value(\n @desc_flags.merge(italic: true, force_bold: true)\n )\n @desc.italic_angle = -11\n @desc.stem_v = 156\n @font\n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def default_font; \"example.font\"; end",
"def set_text\n @text_field.text = @str\n @formats.each do |start, n, style|\n case style\n when FoxTextFormatter::STYLE_BOLD\n @text_field.changeStyle(start, n, 2)\n when FoxTextFormatter::STYLE_H1\n @text_field.changeStyle(start, n, 3)\n when FoxTextFormatter::STYLE_H2\n @text_field.changeStyle(start, n, 4)\n when FoxTextFormatter::STYLE_H3\n @text_field.changeStyle(start, n, 5)\n when FoxTextFormatter::STYLE_TELETYPE\n @text_field.changeStyle(start, n, 6)\n when FoxTextFormatter::STYLE_CODE\n @text_field.changeStyle(start, n, 7)\n when FoxTextFormatter::STYLE_EMPHASIS\n @text_field.changeStyle(start, n, 8)\n when FoxTextFormatter::STYLE_CLASS\n @text_field.changeStyle(start, n, 9)\n else\n @text_field.changeStyle(start, n, 1)\n end\n\n end\n end",
"def update_font\n self.contents.font.name = @window.fontName\n #self.recalculate_maxlength\n self.refresh\n end",
"def italic\n @info[:italic]\n end",
"def font_setup\n %Q(font \"#{@font},#{@fontsize}\")\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * row.worksheet.workbook.bold_font_multiplier) : font.sz\n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def change_font_underline(underlined = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_underline(underlined)\n update_font_references(font)\n end",
"def change_font_underline(underlined = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_underline(underlined)\n update_font_references(font)\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_font_bold(bolded=false)\n validate_worksheet\n # Get copy of font object with modified bold settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_font(font)\n end",
"def modify_font(workbook, style_index)\n # xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(workbook.get_style(style_index))\n\n #modify fonts array\n font_id = xf[:fontId]\n font = workbook.fonts[font_id.to_s][:font]\n\n #else, just change the attribute itself, done in calling method.\n if workbook.fonts[font_id.to_s][:count] > 1 || font_id == 0\n old_size = workbook.fonts.size.to_s\n workbook.fonts[old_size] = {}\n workbook.fonts[old_size][:font] = deep_copy(font)\n workbook.fonts[old_size][:count] = 1\n workbook.fonts[font_id.to_s][:count] -= 1\n\n #modify styles array\n font_id = old_size\n\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fontId] = font_id\n xf[:applyFont] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1 #returns new style_index\n else\n return style_index\n end\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def ansi_formatting(cell, col, row); end",
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def set_font(face, size)\n @curr_font = Gauges::FontRef.get(face, size)\n end",
"def font_switch(c,change_type,arg)\n case change_type\n when Worksheet::NAME\n unless arg.is_a?String\n raise 'Not a String'\n end\n c.change_font_name(arg)\n when Worksheet::SIZE\n unless arg.is_a?(Integer) || arg.is_a?(Float)\n raise 'Not a Number'\n end\n c.change_font_size(arg)\n when Worksheet::COLOR\n Color.validate_color(arg)\n c.change_font_color(arg)\n when Worksheet::ITALICS\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_italics(arg)\n when Worksheet::BOLD\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_bold(arg)\n when Worksheet::UNDERLINE\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_underline(arg)\n when Worksheet::STRIKETHROUGH\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_strikethrough(arg)\n else\n raise 'Invalid change_type'\n end\n end",
"def font=(font)\n @label.font = font\n end",
"def scr_bold\n print \"\\33[1m\"\nend",
"def after_cell_render_styling(column_name,cell_value,record)\n \"\"\n \n end",
"def reset_font_settings; set_data_font(:scene_label); end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def change_font_name(font_name='Verdana')\n validate_worksheet\n # Get copy of font object with modified name\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_font(font)\n end",
"def reset_font_settings(*args, &block)\n super(*args, &block)\n self.contents.font.name = MARW_CONFIGURATION[:window_fontname] unless\n MARW_CONFIGURATION[:window_fontname].empty?\n self.contents.font.size = MARW_CONFIGURATION[:window_fontsize] unless\n MARW_CONFIGURATION[:window_fontsize] < 8\n end",
"def SetFont(family, style='', size=0)\n\t\t# save previous values\n\t\t@prevfont_family = @font_family;\n\t\t@prevfont_style = @font_style;\n\n\t\tfamily=family.downcase;\n\t\tif (family=='')\n\t\t\tfamily=@font_family;\n\t\tend\n\t\tif ((!@is_unicode) and (family == 'arial'))\n\t\t\tfamily = 'helvetica';\n\t\telsif ((family==\"symbol\") or (family==\"zapfdingbats\"))\n\t\t\tstyle='';\n\t\tend\n\t\t\n\t\tstyle=style.upcase;\n\n\t\tif (style.include?('U'))\n\t\t\t@underline=true;\n\t\t\tstyle= style.gsub('U','');\n\t\telse\n\t\t\t@underline=false;\n\t\tend\n\t\tif (style.include?('D'))\n\t\t\t@deleted=true;\n\t\t\tstyle= style.gsub('D','');\n\t\telse\n\t\t\t@deleted=false;\n\t\tend\n\t\tif (style=='IB')\n\t\t\tstyle='BI';\n\t\tend\n\t\tif (size==0)\n\t\t\tsize=@font_size_pt;\n\t\tend\n\n\t\t# try to add font (if not already added)\n\t\tAddFont(family, style);\n\t\t\n\t\t#Test if font is already selected\n\t\tif ((@font_family == family) and (@font_style == style) and (@font_size_pt == size))\n\t\t\treturn;\n\t\tend\n\t\t\n\t\tfontkey = family + style;\n\t\tstyle = '' if (@fonts[fontkey].nil? and !@fonts[family].nil?)\n \n\t\t#Test if used for the first time\n\t\tif (@fonts[fontkey].nil?)\n\t\t\t#Check if one of the standard fonts\n\t\t\tif (!@core_fonts[fontkey].nil?)\n\t\t\t\tif @@fpdf_charwidths[fontkey].nil?\n\t\t\t\t\t#Load metric file\n\t\t\t\t\tfile = family;\n\t\t\t\t\tif ((family!='symbol') and (family!='zapfdingbats'))\n\t\t\t\t\t\tfile += style.downcase;\n\t\t\t\t\tend\n\t\t\t\t\tif (getfontpath(file + '.rb').nil?)\n\t\t\t\t\t\t# try to load the basic file without styles\n\t\t\t\t\t\tfile = family;\n\t\t\t\t\t\tfontkey = family;\n\t\t\t\t\tend\n\t\t\t\t\trequire(getfontpath(file + '.rb'));\n \t\tfont_desc = TCPDFFontDescriptor.font(file)\n\t\t\t\t\tif ((@is_unicode and ctg.nil?) or ((!@is_unicode) and (@@fpdf_charwidths[fontkey].nil?)) )\n\t\t\t\t\t\tError(\"Could not include font metric file [\" + fontkey + \"]: \" + getfontpath(file + \".rb\"));\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\ti = @fonts.length + 1;\n\n\t\t\t\tif (@is_unicode)\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type' => font_desc[:type], 'name' => font_desc[:name], 'desc' => font_desc[:desc], 'up' => font_desc[:up], 'ut' => font_desc[:ut], 'cw' => font_desc[:cw], 'enc' => font_desc[:enc], 'file' => font_desc[:file], 'ctg' => font_desc[:ctg]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\telse\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type'=>'core', 'name'=>@core_fonts[fontkey], 'up'=>-100, 'ut'=>50, 'cw' => font_desc[:cw]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tError('Undefined font: ' + family + ' ' + style);\n\t\t\tend\n\t\tend\n\t\t#Select it\n\t\t@font_family = family;\n\t\t@font_style = style;\n\t\t@font_size_pt = size;\n\t\t@font_size = size / @k;\n\t\t@current_font = @fonts[fontkey]; # was & may need deep copy?\n\t\tif (@page>0)\n\t\t\tout(sprintf('BT /F%d %.2f Tf ET', @current_font['i'], @font_size_pt));\n\t\tend\n\tend",
"def bold; end",
"def bold; end",
"def change_font_color(font_color='000000')\n validate_worksheet\n #if arg is a color name, convert to integer\n Color.validate_color(font_color)\n # Get copy of font object with modified color\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_font(font)\n end",
"def pbSetSmallFont(bitmap)\n bitmap.font.name=pbSmallFontName\n bitmap.font.size=25\nend",
"def change_color(color)\n self.font.color = color\n end",
"def font_weight=(value)\n allow_symbols_as_values(value)\n @style.set_property(StyleTextPropertiesElement::FontWeight, value)\n @style.set_property(StyleTextPropertiesElement::FontWeightAsian, value)\n @style.set_property(StyleTextPropertiesElement::FontWeightComplex, value)\n end",
"def display_text\n\t\t\t#specifying font to be used\n\t\t\t#text_font @font, 16\n\t\t\t#text \"Mmmmm ... Strings .... \", 10, 100\n\t\tend",
"def set_font_path()\n Dir.entries(@output_styles).each do |filename|\n next if filename =~ /^\\.\\.?$/\n filepath = \"#{@output_styles}/#{filename}\"\n\n text = File.read(filepath)\n new_text = text.gsub(/..\\/font/, \"#{@fonts_path}\")\n File.write(filepath, new_text)\n end\n end",
"def reset_table_cell\n mixin({\n text_align: 'left',\n font_weight: 'normal',\n vertical_align: 'middle'\n })\n end",
"def info_font(*args)\n @p.info_font(self, *args)\n end",
"def change_font_strikethrough(struckthrough=false)\n validate_worksheet\n # Get copy of font object with modified strikethrough settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_strikethrough(font, struckthrough)\n # Update font and xf array\n change_font(font)\n end",
"def change_text_color(color)\n @text_color = color\n @text_entry.refresh\n self.redraw\n end"
] | [
"0.72236353",
"0.678297",
"0.6767767",
"0.6639636",
"0.6613759",
"0.6494832",
"0.6494832",
"0.64336115",
"0.6389968",
"0.63753814",
"0.63357896",
"0.6335046",
"0.63281983",
"0.63196945",
"0.62855744",
"0.62762696",
"0.62762696",
"0.6264959",
"0.6257875",
"0.6174606",
"0.6145247",
"0.6145247",
"0.6112318",
"0.6064588",
"0.6064588",
"0.6062564",
"0.60602087",
"0.6043203",
"0.6043203",
"0.60109603",
"0.5994572",
"0.5994572",
"0.5992222",
"0.5989679",
"0.5942911",
"0.5931636",
"0.5931636",
"0.5914268",
"0.5907564",
"0.5890915",
"0.58767354",
"0.5856688",
"0.5846747",
"0.5846747",
"0.58425444",
"0.58413935",
"0.5791151",
"0.5772798",
"0.5754055",
"0.5725109",
"0.5714058",
"0.57121927",
"0.5676343",
"0.5676343",
"0.56703967",
"0.5664205",
"0.5663779",
"0.56570566",
"0.5649774",
"0.5601911",
"0.5589193",
"0.5581941",
"0.5561017",
"0.5542462",
"0.55332416",
"0.55332416",
"0.55116934",
"0.55116934",
"0.550301",
"0.5499373",
"0.54985684",
"0.54920584",
"0.5489602",
"0.5489298",
"0.5483334",
"0.5445888",
"0.5431445",
"0.54202676",
"0.53976613",
"0.5391886",
"0.5389167",
"0.53543997",
"0.5351362",
"0.5351362",
"0.53395104",
"0.53264886",
"0.5324627",
"0.5321088",
"0.5321088",
"0.53199315",
"0.5311553",
"0.5309114",
"0.5289856",
"0.5264891",
"0.5246467",
"0.5236725",
"0.5230217",
"0.52187127",
"0.52119285"
] | 0.78815913 | 1 |
Changes font bold settings of cell | def change_font_bold(bolded = false)
validate_worksheet
font = get_cell_font.dup
font.set_bold(bolded)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def change_font_bold(bolded=false)\n validate_worksheet\n # Get copy of font object with modified bold settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_font(font)\n end",
"def bold= bool\n self.weight = bool ? :bold : nil\n end",
"def change_column_bold(col=0, bolded=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified bold settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_column_font(col, Worksheet::BOLD, bolded, font, xf_id)\n end",
"def change_row_bold(row=0, bolded=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified bold settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_row_font(row, Worksheet::BOLD, bolded, font, xf_id)\n end",
"def bold_italic_cell(options = {}, &block)\n cell({ font_style: :bold_italic }.merge(options || {}), &block)\n end",
"def change_font_decor\n #If toggled to on, font is bold, otherwise not bold.\n # Best case here is to use NSAttributedString\n if @switch_font.on? == true\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}-Bold\",size:@font_size)\n else\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}\",size:@font_size)\n end\n end",
"def bold\n surround_with_ansi(ANSI_BOLD)\n end",
"def bold; end",
"def bold; end",
"def bold; \"\\e[1m#{self}\\e[22m\" end",
"def mystdfmt(ws)\n return if !(ws.is_a?(WIN32OLE))\n ws.rows(1).Font.Bold=true\n ws.UsedRange.Font.size=8\n ws.columns.autofit\nend",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def setBold(btn, nom)\n label = Gtk::Label.new\n label.set_markup(\"<span weight = 'ultrabold'>#{nom}</span>\")\n btn.add(label)\n btn.set_height_request(60)\n end",
"def bold(text)\n \"\\e[1m#{text}\\e[0m\"\n end",
"def bold(id)\n {\n 0x0190 => :BLSNORMAL, # Normal font weight\n 0x02BC => :BLSBOLD, # Bold font weight\n 0xFFFF => :ignored # Indicates that this specification is to be ignored\n }[id]\n end",
"def bold_italic\n @font.fullname = 'MS-Mincho.BoldItalic'\n @desc.flags = flag_value(\n @desc_flags.merge(italic: true, force_bold: true)\n )\n @desc.italic_angle = -11\n @desc.stem_v = 156\n @font\n end",
"def bold_title=(value)\n @title_font.bold = value\n end",
"def bold\n @info[:bold]\n end",
"def bold(string_to_bold)\n Term::ANSIColor.bold string_to_bold\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def bold(str)\n console_bold=\"\\e[1m\"\n console_reset=\"\\e[0m\"\n \"#{console_bold}#{str}#{console_reset}\"\nend",
"def set_user_font model\n model.unread?(@user) ? 'font-bold' : '' rescue ''\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def scr_bold\n print \"\\33[1m\"\nend",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def to_bold\n %(<strong>#{self}</strong>)\n end",
"def font_size\n return sz if sz\n\n font = styles.fonts[styles.cellXfs[style].fontId] || styles.fonts[0]\n font.b || (defined?(@b) && @b) ? (font.sz * row.worksheet.workbook.bold_font_multiplier) : font.sz\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def change_font_italics(italicized=false)\n validate_worksheet\n # Get copy of font object with modified italics settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_font(font)\n end",
"def bolden(issue, row, color=:bold)\n bld = issue[@bolden_tag]\n bld = @custom_tags[@bolden_tag] if bld.nil?\n bld = bld.call(issue.dup) if bld.kind_of? Proc\n # ? truthy other than Ruby default?\n return row unless bld\n if row.kind_of? Array then\n row.map{|r| HighLine.color(r, color)}\n elsif row.kind_of? Hash then\n hsh={}\n row.each_pair{|k,v| hsh[k] = HighLine.color(v, color)}\n else\n HighLine.color(row.to_s, color)\n end\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def font_weight=(value)\n allow_symbols_as_values(value)\n @style.set_property(StyleTextPropertiesElement::FontWeight, value)\n @style.set_property(StyleTextPropertiesElement::FontWeightAsian, value)\n @style.set_property(StyleTextPropertiesElement::FontWeightComplex, value)\n end",
"def bold\n \"<b>#{self}</b>\".html_safe\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def semanticize_font_styles!\n @document.tree.css('span').each do |node|\n if node.bold?\n node.node_name = 'strong'\n elsif node.italic?\n node.node_name = 'em'\n end\n end\n end",
"def bold(*args)\n color('1', *args)\n end",
"def bold(output)\n color('1', output)\n end",
"def change_column_italics(col=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_column_font(col, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def change_row_italics(row=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_row_font(row, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def modify_font(workbook, style_index)\n # xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(workbook.get_style(style_index))\n\n #modify fonts array\n font_id = xf[:fontId]\n font = workbook.fonts[font_id.to_s][:font]\n\n #else, just change the attribute itself, done in calling method.\n if workbook.fonts[font_id.to_s][:count] > 1 || font_id == 0\n old_size = workbook.fonts.size.to_s\n workbook.fonts[old_size] = {}\n workbook.fonts[old_size][:font] = deep_copy(font)\n workbook.fonts[old_size][:count] = 1\n workbook.fonts[font_id.to_s][:count] -= 1\n\n #modify styles array\n font_id = old_size\n\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fontId] = font_id\n xf[:applyFont] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1 #returns new style_index\n else\n return style_index\n end\n end",
"def bold(*args); end",
"def change_column_font_size(col=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_column_font(col, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def bold_message(message)\n Format(:bold, message)\n end",
"def configure_cell_attributes(workbook_cell)\n workbook_cell.format = template.formats[@cell.xpath(\"@table:style-name\").to_s]\n workbook_cell.colspan = @cell.xpath(\"@table:number-columns-spanned\").to_s\n workbook_cell.rowspan = @cell.xpath(\"@table:number-rows-spanned\").to_s\n end",
"def to_bold_s\n \"\\e[1m#{titre}\\e[22m: #{summary}.\"\n end",
"def change_column_font_name(col=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_column_font(col, Worksheet::NAME, font_name, font, xf_id)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def cnt_bold(c)\n \"#{c}\"\n end",
"def change_font_underline(underlined = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_underline(underlined)\n update_font_references(font)\n end",
"def change_font_underline(underlined = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_underline(underlined)\n update_font_references(font)\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def italic_cell(options = {}, &block)\n cell({ font_style: :italic }.merge(options || {}), &block)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def set_font_bitmap(bitmap)\n bitmap.font.bold = false\n bitmap.font.italic = false\n bitmap.font.color = Color::WHITE\n end",
"def set_font_bitmap(bitmap)\n bitmap.font.bold = false\n bitmap.font.italic = false\n bitmap.font.color = Color::WHITE\n end",
"def font_switch(c,change_type,arg)\n case change_type\n when Worksheet::NAME\n unless arg.is_a?String\n raise 'Not a String'\n end\n c.change_font_name(arg)\n when Worksheet::SIZE\n unless arg.is_a?(Integer) || arg.is_a?(Float)\n raise 'Not a Number'\n end\n c.change_font_size(arg)\n when Worksheet::COLOR\n Color.validate_color(arg)\n c.change_font_color(arg)\n when Worksheet::ITALICS\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_italics(arg)\n when Worksheet::BOLD\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_bold(arg)\n when Worksheet::UNDERLINE\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_underline(arg)\n when Worksheet::STRIKETHROUGH\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_strikethrough(arg)\n else\n raise 'Invalid change_type'\n end\n end",
"def change_column_font_color(col=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_column_font(col, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def change_font(font)\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, font_id())\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id())\n xf[:fontId] = Integer(font_id.to_i)\n # Modify xf array and retrieve new xf id\n @style_index = modify_xf(@workbook, xf)\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def font_switch(change_type, arg)\n case change_type\n when Worksheet::NAME then change_font_name(arg)\n when Worksheet::SIZE then change_font_size(arg)\n when Worksheet::COLOR then change_font_color(arg)\n when Worksheet::ITALICS then change_font_italics(arg)\n when Worksheet::BOLD then change_font_bold(arg)\n when Worksheet::UNDERLINE then change_font_underline(arg)\n when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)\n else raise 'Invalid change_type'\n end\n end",
"def set_font_from_path(font, bold_font)\n font_name = Pathname.new(font).basename\n @pdf.font_families.update(\n \"#{font_name}\" => {\n normal: font,\n italic: font,\n bold: bold_font,\n bold_italic: bold_font\n }\n )\n @pdf.font(font_name)\n end",
"def bold_puts(msg)\n output.puts(bold(msg))\n end",
"def key_value(options = {}, &block)\n options ||= {}\n\n if options[:key]\n options[:values] ||= []\n options[:values][0] = options[:key]\n end\n if options[:value]\n options[:values] ||= []\n options[:values][1] = options[:value]\n end\n\n options = {\n cell_1_font_style: :bold\n }.merge(options.except(:key, :value))\n\n cells options, &block\n end",
"def bold?\n parent?(Tree::Bold)\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def font_weight(weight)\n if weight.is_a?(WeightType)\n primitive \"font-weight #{FONT_WEIGHT_NAMES[weight.to_i]}\"\n else\n primitive \"font-weight #{Integer(weight)}\"\n end\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def change_font_size(font_size=10)\n validate_worksheet\n if font_size.is_a?(Integer) || font_size.is_a?(Float)\n # Get copy of font object with modified size\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_font(font)\n else\n raise 'Argument must be a number'\n end\n end",
"def bold_print(text)\n text.split(//).each do |ch|\n print ch, BS, ch\n end\n end",
"def change_column_strikethrough(col=0, struckthrough=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified strikethrough settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_strikethrough(font, struckthrough)\n # Update font and xf array\n change_column_font(col, Worksheet::STRIKETHROUGH, struckthrough, font, xf_id)\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def font(row, col, sheet = nil)\n sheet ||= default_sheet\n read_cells(sheet)\n row, col = normalize(row, col)\n style_name = @style[sheet][[row, col]] || @style_defaults[sheet][col - 1] || 'Default'\n @font_style_definitions[style_name]\n end",
"def bold text, pry=(defined?(_pry_) && _pry_) || Pry\n (pry and pry.color) ? \"\\e[1m#{text}\\e[0m\" : text\n end",
"def change_font_underline(underlined=false)\n validate_worksheet\n # Get copy of font object with modified underline settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_font(font)\n end",
"def bold?\n @node.name == 'span' &&\n @node.has_attribute?('class') &&\n @css_analyzer.bold?(@node.attribute('class').value)\n end",
"def setBKattr(attrib)\n @win.wbkgd(attrib)\n (0..@vrows).each do |x|\n (0..@vcols).each do |y|\n # wbkgd (MATRIX_CELL (widget, x, y), attrib);\n end\n end\n end",
"def colored_string\n color(to_bold_s)\n end",
"def h3; self.green.bold end",
"def change_font_color(font_color='000000')\n validate_worksheet\n #if arg is a color name, convert to integer\n Color.validate_color(font_color)\n # Get copy of font object with modified color\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_font(font)\n end",
"def table_content\n # This makes a call to gift_rows and gets back an array of data that will \n # populate the columns and rows of a table I then included some styling to \n # include a header and make its text bold. I made the row background colors \n # alternate between grey and white Then I set the table column widths\n table gift_rows do\n row(0).font_style = :bold\n self.header = true\n self.row_colors = ['DDDDDD', 'FFFFFF']\n self.column_widths = [180, 180, 180]\n end\n end"
] | [
"0.8035638",
"0.7800186",
"0.73778224",
"0.7356081",
"0.7309667",
"0.70927423",
"0.6959655",
"0.69148815",
"0.6847863",
"0.6847863",
"0.67769545",
"0.67683005",
"0.674681",
"0.674681",
"0.66853034",
"0.66853034",
"0.66616106",
"0.66564745",
"0.6619126",
"0.6587305",
"0.65795684",
"0.65689945",
"0.6386688",
"0.63277584",
"0.63129693",
"0.63065016",
"0.62924314",
"0.627787",
"0.62609434",
"0.6221754",
"0.6204245",
"0.6182182",
"0.61703545",
"0.61377084",
"0.6117152",
"0.6105179",
"0.60689443",
"0.6042911",
"0.6013855",
"0.60116255",
"0.60011953",
"0.60011953",
"0.5902722",
"0.5891608",
"0.58734703",
"0.58572453",
"0.58538157",
"0.58236927",
"0.58221275",
"0.57751346",
"0.5755175",
"0.57543045",
"0.57511246",
"0.57012266",
"0.56124073",
"0.56009877",
"0.56009877",
"0.55934983",
"0.55862844",
"0.5579039",
"0.5572352",
"0.5572352",
"0.55685776",
"0.5554549",
"0.55192316",
"0.55192316",
"0.5495589",
"0.5495589",
"0.5453459",
"0.5442696",
"0.5442696",
"0.54325265",
"0.54325265",
"0.54236495",
"0.53780603",
"0.5346485",
"0.53449976",
"0.53449976",
"0.5331659",
"0.5313713",
"0.52942455",
"0.5291035",
"0.52873594",
"0.5251057",
"0.52471375",
"0.5242146",
"0.5225947",
"0.5219159",
"0.52101254",
"0.52033705",
"0.5180202",
"0.51688766",
"0.51376283",
"0.511466",
"0.5101056",
"0.50947183",
"0.50932753",
"0.50656265",
"0.5065564"
] | 0.8302406 | 1 |
Changes font underline settings of cell | def change_font_underline(underlined = false)
validate_worksheet
font = get_cell_font.dup
font.set_underline(underlined)
update_font_references(font)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_font_underline(underlined=false)\n validate_worksheet\n # Get copy of font object with modified underline settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_font(font)\n end",
"def change_row_underline(row=0, underlined=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified underline settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_row_font(row, Worksheet::UNDERLINE, underlined, font, xf_id)\n end",
"def change_column_underline(col=0, underlined=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified underline settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_underline(font, underlined)\n # Update font and xf array\n change_column_font(col, Worksheet::UNDERLINE, underlined, font, xf_id)\n end",
"def underline_cell(options = {}, &block)\n cell({ borders: [ :bottom ], border_width: 0.5 }.merge(options || {}), &block)\n end",
"def underline; end",
"def underline; end",
"def underline(underline=nil)\n cur_page.underline(underline)\n end",
"def underline\n surround_with_ansi(ANSI_UNDERLINE)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if @strong_on\n \"</strong></font>\"\n else\n \"</font>\"\n end\n \n end",
"def header_format; self.underline end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</strong></font>'\n \n end",
"def no_underline\n reset_prev_formatting self, :underline\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>'\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n\n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \n \"</font>\"\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n '</font>' if column_name != \"id\"\n \n end",
"def underline(text, options = {})\n options = { at: [0, 0] }.merge(options || {})\n text ||= ''\n text = Prawn::Text::NBSP if text.empty?\n text_width = options[:width] || width_of(text)\n text_height = options[:height] || height_of(text, width: text_width)\n text_box text, at: options[:at], width: text_width, height: text_height\n stored_line_width = line_width\n stored_stroke_color = stroke_color\n self.line_width = 0.5\n self.stroke_color = '000000'\n y = options[:at][1] - text_height + 2.pt\n x = options[:at][0]\n stroke_line [x, y], [ x + text_width, y ]\n self.line_width = stored_line_width\n self.stroke_color = stored_stroke_color\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_font_bold(bolded = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_bold(bolded)\n update_font_references(font)\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf, get_row_style(row_index))\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def change_row_font(row_index, change_type, arg, font)\n validate_workbook\n ensure_cell_exists(row_index)\n\n xf = workbook.register_new_font(font, get_row_xf(row_index))\n row = sheet_data[row_index]\n row.style_index = workbook.register_new_xf(xf)\n row.cells.each { |c| c.font_switch(change_type, arg) unless c.nil? }\n end",
"def underline_on(weight=1)\n write_bytes(27, 45, weight)\n end",
"def bold; \"\\e[1m#{self}\\e[22m\" end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def change_font_italics(italicized = false)\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_italic(italicized)\n update_font_references(font)\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n if column_name!=\"id\"\n '</font>'\n end\n end",
"def add_underline symbol\n ('_' + symbol.to_s).to_sym\nend",
"def mystdfmt(ws)\n return if !(ws.is_a?(WIN32OLE))\n ws.rows(1).Font.Bold=true\n ws.UsedRange.Font.size=8\n ws.columns.autofit\nend",
"def css_inline_config\n {\n css: {\n properties: %w(font-style font-weight text-decoration)\n }\n }\n end",
"def change_row_strikethrough(row=0, struckthrough=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified strikethrough settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_strikethrough(font, struckthrough)\n # Update font and xf array\n change_row_font(row, Worksheet::STRIKETHROUGH, struckthrough, font, xf_id)\n end",
"def change_row_bold(row=0, bolded=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified bold settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_row_font(row, Worksheet::BOLD, bolded, font, xf_id)\n end",
"def underline(str)\n str += \"\\n\" + \"-\"*str.length\n end",
"def change_row_italics(row=0, italicized=false)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified italics settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_row_font(row, Worksheet::ITALICS, italicized, font, xf_id)\n end",
"def underline(str)\n \"<u>#{str.to_s}</u>\"\n end",
"def setarrowstyle(*)\n super\n end",
"def after_cell_render_styling(column_name,cell_value,record)\n \"\"\n \n end",
"def change_font_decor\n #If toggled to on, font is bold, otherwise not bold.\n # Best case here is to use NSAttributedString\n if @switch_font.on? == true\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}-Bold\",size:@font_size)\n else\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}\",size:@font_size)\n end\n end",
"def ansi_formatting(cell, col, row); end",
"def line_style(data_set_index, options={})\n @line_styles[data_set_index] = \"#{options[:line_thickness]}\"\n @line_styles[data_set_index] += \",#{options[:length_segment]},#{options[:length_blank]}\" if options[:length_segment]\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def update_font_references(modified_font)\n xf = workbook.register_new_font(modified_font, get_cell_xf)\n self.style_index = workbook.register_new_xf(xf)\n end",
"def before_cell_render_styling(column_name,cell_value,record)\n \"\"\n end",
"def bold_cell(options = {}, &block)\n cell({ font_style: :bold }.merge(options || {}), &block)\n end",
"def process(type, text, page)\n if type == \"Underline\"\n if @out[-1].index(text.strip)\n @out << @out.pop.gsub(text.strip,\"::::#{text.strip}::::\")\n else\n type = \"Underline-standalone\"\n end\n end\n return type == \"Underline\" ? nil : format(type, text, page) \nend",
"def scr_bold\n print \"\\33[1m\"\nend",
"def change_column_strikethrough(col=0, struckthrough=false)\n # Get style object\n xf_id = xf_id(get_col_style(col))\n # Get copy of font object with modified strikethrough settings\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_strikethrough(font, struckthrough)\n # Update font and xf array\n change_column_font(col, Worksheet::STRIKETHROUGH, struckthrough, font, xf_id)\n end",
"def underline_message(message)\n Format(:underline, message)\n end",
"def settextfontprec(*)\n super\n end",
"def change_font_strikethrough(struckthrough=false)\n validate_worksheet\n # Get copy of font object with modified strikethrough settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_strikethrough(font, struckthrough)\n # Update font and xf array\n change_font(font)\n end",
"def set_user_font model\n model.unread?(@user) ? 'font-bold' : '' rescue ''\n end",
"def h3; self.green.bold end",
"def resize_strike_through\n text_size = text.sizeWithAttributes({ NSFontAttributeName => font })\n strikethrough_layer.frame = CGRectMake(0, self.bounds.size.height/2,\n text_size.width, STRIKEOUT_THICKNESS)\n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def table_answer_style\n {\n cell_style: {\n border_width: 0,\n size: 12,\n align: :left,\n padding: [0, 0, 0, 35]\n },\n header: true\n }\n end",
"def bold\n surround_with_ansi(ANSI_BOLD)\n end",
"def setBold(btn, nom)\n label = Gtk::Label.new\n label.set_markup(\"<span weight = 'ultrabold'>#{nom}</span>\")\n btn.add(label)\n btn.set_height_request(60)\n end",
"def set_style(*v)\n styles = v.map { |i| @@style[i.to_sym] }.transpose\n prepend(\"\\e[\" << styles[0].join(';') << 'm')\n concat(\"\\e[\" << styles[1].join(';') << 'm')\n end",
"def process(type, text, page)\n if type == \"Underline\"\n if @out[-1] && @out[-1].index(text.strip)\n @out << @out.pop.gsub(text.strip,\"::::#{text.strip}::::\")\n else\n type = \"Underline-standalone\"\n end\n end\n return type == \"Underline\" ? nil : format(type, text, page)\nend",
"def misc(workbook, _center, _heading, _colors)\n worksheet = workbook.add_worksheet('Miscellaneous')\n\n worksheet.set_column(2, 2, 25)\n\n format01 = workbook.add_format\n format02 = workbook.add_format\n format03 = workbook.add_format\n format04 = workbook.add_format\n format05 = workbook.add_format\n format06 = workbook.add_format\n format07 = workbook.add_format\n\n format01.set_underline(0x01)\n format02.set_underline(0x02)\n format03.set_underline(0x21)\n format04.set_underline(0x22)\n format05.set_font_strikeout\n format06.set_font_outline\n format07.set_font_shadow\n\n worksheet.write(1, 2, 'Underline 0x01', format01)\n worksheet.write(3, 2, 'Underline 0x02', format02)\n worksheet.write(5, 2, 'Underline 0x21', format03)\n worksheet.write(7, 2, 'Underline 0x22', format04)\n worksheet.write(9, 2, 'Strikeout', format05)\n worksheet.write(11, 2, 'Outline (Macintosh only)', format06)\n worksheet.write(13, 2, 'Shadow (Macintosh only)', format07)\n end",
"def change_row_font_size(row=0, font_size=10)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified size\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:sz][:attributes][:val] = font_size\n # Update font and xf array\n change_row_font(row, Worksheet::SIZE, font_size, font, xf_id)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_row_font_color(row=0, font_color='000000')\n Color.validate_color(font_color)\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified color\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font = modify_font_color(font, font_color.to_s)\n # Update font and xf array\n change_row_font(row, Worksheet::COLOR, font_color, font, xf_id)\n end",
"def bold; end",
"def bold; end",
"def default_line_style\n [1.5, 1, 0]\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def table_question_style\n {\n column_widths: { 0 => 460 },\n cell_style: {\n border_width: 0,\n size: 12,\n align: :left,\n font_style: :bold,\n padding: [0, 0, 0, 45]\n },\n header: true\n }\n end",
"def change_font_bold(bolded=false)\n validate_worksheet\n # Get copy of font object with modified bold settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_bold(font, bolded)\n # Update font and xf array\n change_font(font)\n end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def makeAttributed(parameters = {})\n options = { text: 'text',\n font: NSFont.fontWithName( 'Menlo', size: 12 ),\n traits: 0, # trait mask (NSBoldFontMask, NSItalicFontMask, etc)\n textColor: NSColor.textColor,\n backgroundColor: NSColor.clearColor, # use textView background\n underline: false\n }.merge(parameters)\n return options[:text] if options[:text].class.to_s.end_with?('AttributedString')\n paraStyle = NSMutableParagraphStyle.alloc.init.tap do |style|\n style.tabStops = (1..21).each_with_object([]) do |index, tabArray| # add a few more\n tabArray.addObject(NSTextTab.alloc\n .initWithType( NSLeftTabStopType,\n location: index * 27.0 ))\n end\n end\n underline = options[:underline] ? NSUnderlineStyleSingle : NSUnderlineStyleNone\n NSMutableAttributedString.alloc.initWithString(options[:text]).tap do |attrText|\n attrText.beginEditing\n [[NSParagraphStyleAttributeName, paraStyle],\n [NSUnderlineStyleAttributeName, underline],\n [NSForegroundColorAttributeName, options[:textColor]],\n [NSBackgroundColorAttributeName, options[:backgroundColor]],\n [NSFontAttributeName, options[:font]]].each do |attribute, value|\n attrText.addAttribute(attribute, value: value, range: [0, attrText.length])\n end\n attrText.applyFontTraits(options[:traits], range: [0, attrText.length])\n attrText.endEditing\n end\nend",
"def italic; end",
"def italic; end",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def ignored_submission_style(s)\n \"text-decoration: \" + (s.ignored? ? \"line-through\" : \"none\") + \";\"\n end",
"def underline(string)\n '-' * string.size\n end",
"def get_row_style(row)\n if @row_styles[(row+1).to_s].nil?\n @row_styles[(row+1).to_s] = {}\n @row_styles[(row+1).to_s][:style] = '0'\n @workbook.fonts['0'][:count] += 1\n end\n return @row_styles[(row+1).to_s][:style]\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def change_font_color(font_color = '000000')\n validate_worksheet\n Color.validate_color(font_color)\n\n font = get_cell_font.dup\n font.set_rgb_color(font_color)\n update_font_references(font)\n end",
"def set_textborderwidth(width)\n @text.borderwidth(width)\n end",
"def paint_row(dc, y, addr, row_num)\n dc.set_pen(Wx::TRANSPARENT_PEN)\n dc.set_text_foreground(@addr_color)\n addr_str = addr.to_s(16).rjust(2,\"0\")\n w = dc.get_text_extent(addr_str)[0]\n dc.draw_text(addr_str, (@hex0 - w - @asc_width), y) \n if row_num\n dc.set_brush(@alt_row_bgs[ row_num % @alt_row_bgs.size ])\n dc.draw_rectangle(@hex0, y, @row_width, @txt_height)\n end\n end",
"def bold_italic_cell(options = {}, &block)\n cell({ font_style: :bold_italic }.merge(options || {}), &block)\n end",
"def font_size\n @font_size ||= [cell_height, cell_width].sort.shift * 0.8\n end",
"def emit_text_span_strong(colspan, textline, text_class=nil, options_hash={})\n if text_class\n options_hash[:class] = text_class\n end\n div :class => \"column span-#{colspan}\" do\n strong options_hash do\n text(textline)\n end\n end\n end",
"def change_font_size\n @font_size = 20*(@size_slider.value*2)+20\n #Use change_font_decor to maintain if font is bold or not\n change_font_decor\n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def change_font_size(font_size = 10)\n validate_worksheet\n raise 'Argument must be a number' unless font_size.is_a?(Integer) || font_size.is_a?(Float)\n\n font = get_cell_font.dup\n font.set_size(font_size)\n update_font_references(font)\n end",
"def setlinewidth(*)\n super\n end",
"def reset_table_cell\n mixin({\n text_align: 'left',\n font_weight: 'normal',\n vertical_align: 'middle'\n })\n end",
"def no_strikethrough\n reset_prev_formatting self, :strikethrough\n end",
"def set_line_style(width=1,dot_size=0)\n @line_width = width\n @line_dot_size = dot_size\n end",
"def use_default_style!\n @use_default_style = true\n end",
"def configure_cell_attributes(workbook_cell)\n workbook_cell.format = template.formats[@cell.xpath(\"@table:style-name\").to_s]\n workbook_cell.colspan = @cell.xpath(\"@table:number-columns-spanned\").to_s\n workbook_cell.rowspan = @cell.xpath(\"@table:number-rows-spanned\").to_s\n end",
"def style_on(*v)\n styles = v.map { |i| EscSequence::STYLE[i.to_sym] }.transpose\n \"\\e[\" << styles[0].join(';') << 'm'\n end",
"def font(row, col, sheet=nil)\n sheet = @default_sheet unless sheet\n read_cells(sheet) unless @cells_read[sheet]\n row,col = normalize(row,col)\n s_attribute = @s_attribute[sheet][[row,col]]\n s_attribute ||= 0\n s_attribute = s_attribute.to_i\n @style_definitions[s_attribute]\n end",
"def settextalign(*)\n super\n end"
] | [
"0.7695589",
"0.7397015",
"0.7152001",
"0.6990113",
"0.66901463",
"0.66901463",
"0.63874716",
"0.637665",
"0.6372925",
"0.6372925",
"0.63182336",
"0.6254913",
"0.61407024",
"0.6133203",
"0.6093899",
"0.60723346",
"0.60723346",
"0.59276927",
"0.58676517",
"0.5845751",
"0.5845751",
"0.58191836",
"0.57837224",
"0.5740428",
"0.55542123",
"0.55540085",
"0.55540085",
"0.5551214",
"0.55219495",
"0.55138284",
"0.54823995",
"0.54621094",
"0.5445998",
"0.5430654",
"0.54041654",
"0.54001564",
"0.5375739",
"0.5375491",
"0.53697693",
"0.5356219",
"0.5354512",
"0.5344812",
"0.5344812",
"0.53319305",
"0.5307129",
"0.53051674",
"0.5291981",
"0.52890795",
"0.5288736",
"0.52626264",
"0.5238949",
"0.5226564",
"0.5196808",
"0.5194131",
"0.5176479",
"0.5167873",
"0.51599026",
"0.51477265",
"0.5137757",
"0.5120887",
"0.5113022",
"0.51105195",
"0.5105386",
"0.5105386",
"0.50937647",
"0.50545037",
"0.50545037",
"0.5050825",
"0.503374",
"0.5030362",
"0.5022614",
"0.50124234",
"0.5006659",
"0.49764672",
"0.49764672",
"0.49668708",
"0.49486238",
"0.49481052",
"0.49304482",
"0.4922504",
"0.4922504",
"0.4907662",
"0.49032804",
"0.48976997",
"0.48844948",
"0.48735836",
"0.48346332",
"0.48336244",
"0.48277354",
"0.48277354",
"0.48221463",
"0.479901",
"0.47978455",
"0.47852916",
"0.4783099",
"0.4781748",
"0.4777849",
"0.47765455",
"0.47755715"
] | 0.826207 | 1 |
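For context on the next row: its positive document, `update_font_references`, follows the register-then-point pattern that most of the surrounding font snippets rely on — copy the cell's font, mutate it, register the (possibly new) font in the workbook's font table, then register an xf (cell-format) record that references that font and point the cell's style_index at the xf. Below is a minimal, self-contained Ruby sketch of that pattern; `ToyWorkbook`, `ToyCell`, `register_font` and `register_xf` are hypothetical stand-ins and do not reflect any real spreadsheet library's API.

# Minimal sketch of the font/xf registration pattern (hypothetical classes,
# not any real library): the workbook keeps deduplicated font and xf tables,
# and a cell's style_index points at an xf, which in turn points at a font.
class ToyWorkbook
  attr_reader :fonts, :xfs

  def initialize
    @fonts = []  # font table
    @xfs   = []  # cell-format (xf) table; each entry references a font by index
  end

  # Return the index of an equal font, appending it only if it is new.
  def register_font(font)
    fonts.index(font) || (fonts << font; fonts.size - 1)
  end

  # Same deduplication for xf records.
  def register_xf(xf)
    xfs.index(xf) || (xfs << xf; xfs.size - 1)
  end
end

class ToyCell
  attr_accessor :style_index

  def initialize(workbook)
    @workbook = workbook
    default_font_id = @workbook.register_font({ name: 'Calibri', size: 11 })
    @style_index = @workbook.register_xf({ font_id: default_font_id })
  end

  # Mirrors the positive document below: register the modified font, then
  # point this cell at a (possibly new) xf record that uses it.
  def update_font_references(modified_font)
    font_id = @workbook.register_font(modified_font)
    self.style_index = @workbook.register_xf({ font_id: font_id })
  end
end

wb   = ToyWorkbook.new
cell = ToyCell.new(wb)
cell.update_font_references({ name: 'Calibri', size: 11, bold: true })
puts wb.fonts.length    # 2 fonts: the default and the bold variant
puts cell.style_index   # 1 -- the cell now uses the second xf record

The deduplicating registries are why helpers in this style return indices rather than objects: many cells can share one format record, so changing a cell's font means pointing it at a different index instead of mutating a shared entry.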
Helper method to update the font array and xf array | def update_font_references(modified_font)
xf = workbook.register_new_font(modified_font, get_cell_xf)
self.style_index = workbook.register_new_xf(xf)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def prepare_fonts # :nodoc:\n fonts = {}\n\n @xf_formats.each { |format| format.set_font_info(fonts) }\n\n @font_count = fonts.size\n\n # For the DXF formats we only need to check if the properties have changed.\n @dxf_formats.each do |format|\n # The only font properties that can change for a DXF format are: color,\n # bold, italic, underline and strikethrough.\n format.has_dxf_font(true) if format.color? || format.bold? || format.italic? || format.underline? || format.strikeout?\n end\n end",
"def update_font\n self.contents.font.name = @window.fontName\n #self.recalculate_maxlength\n self.refresh\n end",
"def change_font(font)\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, font_id())\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id())\n xf[:fontId] = Integer(font_id.to_i)\n # Modify xf array and retrieve new xf id\n @style_index = modify_xf(@workbook, xf)\n end",
"def initialize_fonts(application)\r\n\t[12, 14, 16, 18, 24].each() do |size|\r\n\t\tFONT[\"Courier #{size}\"] = FXFont.new(application, 'Courier New', size)\r\n\t\tFONT[\"Helvetica #{size}\"] = FXFont.new(application, 'Helvetica', size)\r\n\tend\r\n\t\r\n\tFONT['Courier 24 bold'] = FXFont.new(application, 'Courier New', 24, FXFont::Bold)\r\nend",
"def interpret(ps)\n raise Error unless ps.fonts.has_key?(@index)\n ps.font = ps.fonts[@index]\n end",
"def modify_font(workbook, style_index)\n # xf_obj = workbook.get_style(style_index)\n xf = workbook.get_style_attributes(workbook.get_style(style_index))\n\n #modify fonts array\n font_id = xf[:fontId]\n font = workbook.fonts[font_id.to_s][:font]\n\n #else, just change the attribute itself, done in calling method.\n if workbook.fonts[font_id.to_s][:count] > 1 || font_id == 0\n old_size = workbook.fonts.size.to_s\n workbook.fonts[old_size] = {}\n workbook.fonts[old_size][:font] = deep_copy(font)\n workbook.fonts[old_size][:count] = 1\n workbook.fonts[font_id.to_s][:count] -= 1\n\n #modify styles array\n font_id = old_size\n\n if workbook.cell_xfs[:xf].is_a?Array\n workbook.cell_xfs[:xf] << deep_copy({:attributes=>xf})\n else\n workbook.cell_xfs[:xf] = [workbook.cell_xfs[:xf], deep_copy({:attributes=>xf})]\n end\n\n xf = workbook.get_style_attributes(workbook.cell_xfs[:xf].last)\n xf[:fontId] = font_id\n xf[:applyFont] = '1'\n workbook.cell_xfs[:attributes][:count] += 1\n return workbook.cell_xfs[:xf].size-1 #returns new style_index\n else\n return style_index\n end\n end",
"def ctrlSetFont _obj, _args\n \"_obj ctrlSetFont _args;\" \n end",
"def load_fonts\n\n # Load fonts.\n self.load_single_font('Across The Road')\n self.load_single_font('Alabama')\n self.load_single_font('Arial')\n self.load_single_font('Arial Narrow')\n self.load_single_font('Arty Signature')\n self.load_single_font('Asem Kandis')\n self.load_single_font('Autograf')\n self.load_single_font('Born Ready')\n self.load_single_font('Brittany Signature')\n self.load_single_font('Bulgatti')\n self.load_single_font('Courier New')\n self.load_single_font('Estelly')\n self.load_single_font('Friday Vibes')\n self.load_single_font('From Skyler')\n self.load_single_font('Gallatone')\n self.load_single_font('Halimun')\n self.load_single_font('Hello Santtiny')\n self.load_single_font('Just Realize')\n self.load_single_font('Just Signature')\n self.load_single_font('Mayestica')\n self.load_single_font('Menlo')\n self.load_single_font('Notera')\n self.load_single_font('Prestige Signature')\n self.load_single_font('Reinata')\n self.load_single_font('Santos Dumont')\n self.load_single_font('SF Mono')\n self.load_single_font('Shopping List')\n self.load_single_font('Signatures')\n self.load_single_font('Signerica')\n self.load_single_font('Silver Pen')\n self.load_single_font('Sophistica')\n self.load_single_font('Source Code Pro')\n self.load_single_font('Southampton')\n self.load_single_font('Thankfully')\n self.load_single_font('The Jacklyn')\n self.load_single_font('Tomatoes')\n self.load_single_font('Wanted Signature')\n self.load_single_font('White Angelica')\n self.load_single_font('Whitney')\n self.load_single_font('Whitney Bold')\n self.load_single_font('Whitney Index Rounded')\n self.load_single_font('Whitney Index Squared')\n self.load_single_font('Xtreem')\n self.load_single_font('Gotham Condensed')\n\n end",
"def reset_font_settings(*args, &block)\n super(*args, &block)\n set_data_font(@maqj_font_data_type) if @maqj_font_data_type\n end",
"def init_font(font)\n @font=font\n dc = Wx::WindowDC.new(self)\n dc.set_font(font)\n @asc_width, asc_h = dc.get_text_extent(\"@\")[0,2]\n @asc_width+2 # compact, but not too much so\n @hex_width, hex_h = dc.get_text_extent(\"@@\")[0,2]\n @txt_height = (hex_h > asc_h)? hex_h : asc_h\n @addr_width = dc.get_text_extent(@data.size.to_s(16).rjust(4,'@'))[0]\n @row_height = @txt_height\n\n update_dimensions() if @started\n end",
"def reset_font_settings; set_data_font(:scene_label); end",
"def update!(**args)\n @font_id = args[:font_id] if args.key?(:font_id)\n @font_name = args[:font_name] if args.key?(:font_name)\n end",
"def ctrlSetFontH6 _obj, _args\n \"_obj ctrlSetFontH6 _args;\" \n end",
"def change_font_name(fontname)\n @font_name = fontname\n @text_entry.update_font\n self.redraw\n end",
"def set_font\n font_families.update(\n 'HealthQuestPDF' => {\n normal: HealthQuest::Engine.root.join('lib', 'fonts', 'sourcesanspro-regular-webfont.ttf'),\n medium: HealthQuest::Engine.root.join('lib', 'fonts', 'sourcesanspro-bold-webfont.ttf'),\n bold: HealthQuest::Engine.root.join('lib', 'fonts', 'bitter-bold.ttf')\n }\n )\n font 'HealthQuestPDF'\n end",
"def ctrlSetFontPB _obj, _args\n \"_obj ctrlSetFontPB _args;\" \n end",
"def ctrlSetFontH3 _obj, _args\n \"_obj ctrlSetFontH3 _args;\" \n end",
"def change_font_name(font_name='Verdana')\n validate_worksheet\n # Get copy of font object with modified name\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_font(font)\n end",
"def another_print_fonts\n for index in 0..@fonts.size do\n puts \"#{index} -> #{@fonts[index]}\" # use of templating\n end\nend",
"def build_fonts(raw_fonts)\n wrapped_fonts = raw_fonts.map { |label, font|\n [label, PDF::Reader::Font.new(@objects, @objects.deref(font))]\n }\n\n ::Hash[wrapped_fonts]\n end",
"def build_fonts(raw_fonts)\n wrapped_fonts = raw_fonts.map { |label, font|\n [label, PDF::Reader::Font.new(@objects, @objects.deref(font))]\n }\n\n ::Hash[wrapped_fonts]\n end",
"def set_font_path()\n Dir.entries(@output_styles).each do |filename|\n next if filename =~ /^\\.\\.?$/\n filepath = \"#{@output_styles}/#{filename}\"\n\n text = File.read(filepath)\n new_text = text.gsub(/..\\/font/, \"#{@fonts_path}\")\n File.write(filepath, new_text)\n end\n end",
"def ctrlSetFontH6B _obj, _args\n \"_obj ctrlSetFontH6B _args;\" \n end",
"def ctrlSetFontH4 _obj, _args\n \"_obj ctrlSetFontH4 _args;\" \n end",
"def ctrlSetFontP _obj, _args\n \"_obj ctrlSetFontP _args;\" \n end",
"def load_fonts\n @@fonts[\"game\"] = Gosu::Font.new(80)\n @@fonts[\"menu\"] = Gosu::Font.new(40)\n @@fonts[\"notification\"] = Gosu::Font.new(25)\n @@fonts[\"big\"] = Gosu::Font.new(20)\n @@fonts[\"small\"] = Gosu::Font.new(14)\n @@fonts[\"button\"] = Gosu::Font.new(15)\n end",
"def ctrlSetFontH2 _obj, _args\n \"_obj ctrlSetFontH2 _args;\" \n end",
"def change_row_font(row, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(row)\n increase_rows(row)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n @row_styles[(row+1).to_s][:style] = modify_xf(@workbook, xf)\n\n if @sheet_data[row].nil?\n @sheet_data[row] = []\n end\n\n @sheet_data[Integer(row)].each do |c|\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def ctrlSetFontH3B _obj, _args\n \"_obj ctrlSetFontH3B _args;\" \n end",
"def setfont(*args)\n @p.setfont(self, *args)\n end",
"def reset_font_settings(*args, &block)\n super(*args, &block)\n self.contents.font.name = MARW_CONFIGURATION[:window_fontname] unless\n MARW_CONFIGURATION[:window_fontname].empty?\n self.contents.font.size = MARW_CONFIGURATION[:window_fontsize] unless\n MARW_CONFIGURATION[:window_fontsize] < 8\n end",
"def set_fonts\n font_families.update(\"Arial\" => {\n :normal => \"#{Rails.root}/vendor/assets/fonts/Arial.ttf\",\n :bold => \"#{Rails.root}/vendor/assets/fonts/Arial-Bold.ttf\"\n })\n font \"Arial\"\n end",
"def ctrlSetFontH2B _obj, _args\n \"_obj ctrlSetFontH2B _args;\" \n end",
"def explode_fonts\n puts 'Explode fonts...'.yellow\n FileUtils.rm_rf(@glyphs)\n FileUtils.mkdir_p(@glyphs)\n Dir.chdir(@sources) do\n for font_fullname in Dir['*'].sort\n font_dir = @sources.join(font_fullname)\n font_name = font_fullname.split('-')[1..-1].join('-')\n font_file = font_dir.join('font.svg')\n config_file = font_dir.join('config.yml')\n if font_file.exist? && config_file.exist?\n command(\"node_modules/.bin/svg-font-dump -n -c #{config_file} -f -i #{font_file} -o #{@glyphs}\")\n end\n end\n end\n # Removes weird icons\n Dir.chdir(@glyphs) do\n Dir.glob('{*,.*}').sort.collect do |cf|\n next if cf.to_s =~ /\\A[a-z0-9]+(\\-[a-z0-9]+)*\\.svg\\z/\n next if ['..', '.'].include? cf\n puts ' - ' + \"Remove #{cf}\".red\n FileUtils.rm_f(cf)\n end\n end\n end",
"def set_font_set_and_project\n @project = Project.find(params[:project_id])\n @font_set = @project.font_sets.find(params[:id])\n @font_families = get_font_family_array(@project)\n end",
"def ctrlSetFontH5 _obj, _args\n \"_obj ctrlSetFontH5 _args;\" \n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def set_font(font_name)\n itr = @form.getFields.keySet.iterator\n while itr.hasNext\n field = itr.next\n @form.setFieldProperty(field, 'textfont', create_font(font_name), nil)\n end\n end",
"def font=(f)\n\t\traise 'Font must respond to char_bitmap' if !f.nil? && !f.respond_to?(:char_bitmap)\n\t\t@font = f\n\tend",
"def ctrlSetFontH4B _obj, _args\n \"_obj ctrlSetFontH4B _args;\" \n end",
"def font=(value)\n @font = value\n end",
"def ctrlSetFontH1 _obj, _args\n \"_obj ctrlSetFontH1 _args;\" \n end",
"def font idx\n @fonts[idx]\n end",
"def setup\r\n background 0\r\n font = load_font \"data/DroidSansMono-24.vlw\"\r\n text_font(font)\r\n\r\n @c = []\r\n generate\r\n end",
"def maiil_reset_font\n fn = MA_IconItemList::NUM_FONTNAME ? MA_IconItemList::NUM_FONTNAME : Font.default_name\n contents.font = Font.new(fn, MA_IconItemList::NUM_FONTSIZE) # name and size\n contents.font.bold = MA_IconItemList::NUM_BOLD # Set Bold\n contents.font.italic = MA_IconItemList::NUM_ITALIC # Set Italic\n contents.font.shadow = MA_IconItemList::NUM_SHADOW # Set Shadow\n contents.font.outline = MA_IconItemList::NUM_OUTLINE # Set outline\n oc = MA_IconItemList::NUM_OUT_COLOUR ? MA_IconItemList::NUM_OUT_COLOUR : Font.default_out_color\n contents.font.out_color = text_color(oc) # outline color\n end",
"def build_common\n @font.encoding = 'UniJIS-UCS2-H'\n @font.names = NAMES\n # Initialize font dictionary.\n @dict.registry = 'Adobe'\n @dict.ordering = 'Japan1'\n @dict.supplement = 2\n @dict.default_width = 1000\n @dict.default_vertical_widths = [880, -1000]\n # Initialize font descriptor.\n @desc.b_box = [0, -136, 1000, 859]\n @desc.italic_angle = 0\n @desc.ascent = 859\n @desc.descent = -140\n @desc.cap_height = 769\n @desc.stem_v = 78\n end",
"def test_fontDiff\n f = Font.new()\n f.color = Color.power_up_color()\n f.size = 20\n f.bold = true\n [@window, @sprite, @bitmap].each{|container|\n uc = UCNumericUpDown.new(container, Rect.new(0, 48, 100, 24), 5, nil, 0, 99, 1, 0, f)\n uc.draw()\n }\n return true\n end",
"def change_font_decor\n #If toggled to on, font is bold, otherwise not bold.\n # Best case here is to use NSAttributedString\n if @switch_font.on? == true\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}-Bold\",size:@font_size)\n else\n @font_label.font = UIFont.fontWithName(\"#{@font_label.text}\",size:@font_size)\n end\n end",
"def ctrlSetFontH1B _obj, _args\n \"_obj ctrlSetFontH1B _args;\" \n end",
"def settextfontprec(*)\n super\n end",
"def ctrlSetFontH5B _obj, _args\n \"_obj ctrlSetFontH5B _args;\" \n end",
"def set_font(key, font)\r\n # Add :Font key to content hash unless there is no key\r\n unless @content[pn(:Resources)].has_key?(pn(:Font))\r\n @content[pn(:Resources)].update(pn(:Font) => pd)\r\n end\r\n # Add font symbol to :Font hash\r\n unless @content[pn(:Resources)][pn(:Font)].has_key?(key)\r\n @content[pn(:Resources)][pn(:Font)].update(key => font.reference)\r\n end\r\n end",
"def font_setup\n %Q(font \"#{@font},#{@fontsize}\")\n end",
"def normalize_font(source, destination, options = {})\n hexa = options[:hexa] || {}\n interm = source.dirname.join(source.basename('.*').to_s + '.svg')\n if interm.exist?\n puts \"No conversion needed for #{interm}\".white\n else\n command(\"fontforge -quiet -script #{@convert_script} #{source} svg\")\n end\n i = File.open(interm)\n doc = Nokogiri::XML(i) do |config|\n config.nonet.strict.noblanks\n end\n i.close\n doc.root.xpath('//svg:glyph[@d]', SVG_NAMESPACES).each do |glyph|\n name = glyph.attr('glyph-name')\n unicode = glyph.attr('unicode').to_s\n if unicode[0]\n hex = '%4.4x' % unicode[0].ord\n name = hexa[hex] if hexa[hex]\n end\n if name && name =~ /^[a-z0-9]+((\\_|\\-)[a-z0-9]+)*$/\n glyph['glyph-name'] = name.gsub(/[^a-z0-9]+/, '-')\n else\n puts \" - weird name (#{unicode.inspect}): #{name.inspect.red} \"\n end\n end\n doc.root.default_namespace = SVG_NAMESPACES[:svg]\n for name, url in SVG_NAMESPACES\n doc.root.add_namespace(name.to_s, url)\n end\n File.open(destination, 'wb') do |f|\n f.write doc.to_s\n end\n end",
"def test_fontDiff\n f = Font.new()\n f.color = Color.power_up_color()\n f.size = 12\n f.bold = true\n [@window, @sprite, @bitmap].each{|container|\n uc = UCGraph.new(container, 300, 180, 50, @elements, nil, 100, -100, f,\n Color.hp_gauge_color1, Color.hp_gauge_color2,\n Color.mp_gauge_color1, Color.mp_gauge_color2)\n uc.draw()\n }\n return true\n end",
"def setup_ft\n unless face\n @face = FT2::Face.load(@font)\n face.set_char_size 0, size * ONE64POINT, RESOLUTION, RESOLUTION\n end\n end",
"def change_font_italics(italicized=false)\n validate_worksheet\n # Get copy of font object with modified italics settings\n font = deep_copy(workbook.fonts[font_id().to_s][:font])\n font = modify_font_italics(font, italicized)\n # Update font and xf array\n change_font(font)\n end",
"def reset_font_settings; set_data_font(:category_label); end",
"def refresh\n contents.clear\n for i in 0...(@cw_data.size / 2)\n draw_text_ex(0, line_height*i, \"\\\\c[16]\" + @cw_data[i*2])\n reset_font_settings\n draw_text(0, line_height*i, contents_width, line_height, value(@cw_data[i*2 + 1]), 2)\n end\n end",
"def info_font(*args)\n @p.info_font(self, *args)\n end",
"def add_font_family(pdf:, name:, root:)\n if root == 'OldStandard'\n # No Bold-Italic variant in OldStandard\n bold_italic_name = \"#{root}-Bold.ttf\"\n else\n bold_italic_name = \"#{root}-BoldItalic.ttf\"\n end\n\n pdf.font_families.update(\n name => {\n normal: Rails.root.join('vendor', 'fonts', root,\n \"#{root}-Regular.ttf\").to_s,\n italic: Rails.root.join('vendor', 'fonts', root,\n \"#{root}-Italic.ttf\").to_s,\n bold: Rails.root.join('vendor', 'fonts', root,\n \"#{root}-Bold.ttf\").to_s,\n bold_italic: Rails.root.join('vendor', 'fonts', root,\n bold_italic_name).to_s\n }\n )\n end",
"def font=(font)\n set_font(font)\n generate_buffers\n end",
"def init_font_families\n # Set the known family groups. These font families will be used to\n # enable bold and italic markers to be included within text\n # streams. HTML forms will be used... <b></b> <i></i>\n @font_families[\"Helvetica\"] =\n {\n \"b\" => 'Helvetica-Bold',\n \"i\" => 'Helvetica-Oblique',\n \"bi\" => 'Helvetica-BoldOblique',\n \"ib\" => 'Helvetica-BoldOblique'\n }\n @font_families['Courier'] =\n {\n \"b\" => 'Courier-Bold',\n \"i\" => 'Courier-Oblique',\n \"bi\" => 'Courier-BoldOblique',\n \"ib\" => 'Courier-BoldOblique'\n }\n @font_families['Times-Roman'] =\n {\n \"b\" => 'Times-Bold',\n \"i\" => 'Times-Italic',\n \"bi\" => 'Times-BoldItalic',\n \"ib\" => 'Times-BoldItalic'\n }\n end",
"def parse_font_table(src, current_pos)\n group = 1\n\n font = nil\n in_extra = nil\n\n while (true)\n case(src[current_pos])\n when '{' then\n font = RubyRTF::Font.new if group == 1\n in_extra = nil\n\n group += 1\n\n when '}' then\n group -= 1\n\n if group <= 1\n break if font.nil?\n font.cleanup_names\n @doc.font_table[font.number] = font\n end\n\n in_extra = nil\n\n break if group == 0\n\n when '\\\\' then\n ctrl, val, current_pos = parse_control(src, current_pos + 1)\n\n font = RubyRTF::Font.new if font.nil?\n\n case(ctrl)\n when :f then font.number = val\n when :fprq then font.pitch = val\n when :fcharset then font.character_set = val\n when *[:flomajor, :fhimajor, :fdbmajor, :fbimajor,\n :flominor, :fhiminor, :fdbminor, :fbiminor] then\n font.theme = ctrl.to_s[1..-1].to_sym\n\n when *[:falt, :fname, :panose] then in_extra = ctrl\n else\n cmd = ctrl.to_s[1..-1].to_sym\n if RubyRTF::Font::FAMILIES.include?(cmd)\n font.family_command = cmd\n end\n end\n\n # need to next as parse_control will leave current_pos at the\n # next character already so current_pos += 1 below would move us too far\n next\n when *[\"\\r\", \"\\n\"] then ;\n else\n case(in_extra)\n when :falt then font.alternate_name << src[current_pos]\n when :panose then font.panose << src[current_pos]\n when :fname then font.non_tagged_name << src[current_pos]\n when nil then font.name << src[current_pos]\n end\n end\n current_pos += 1\n end\n\n current_pos\n end",
"def setFontLib(name,location)\n\t\t\t@font = name\n\t\t\t@font_lib = CaptchaFont.new(name,location)\n\n\t\tend",
"def time_to_font(time,font)\n output = []\n for i in 0...time.length()\n converted_font = convert_1_number_to_1_font(time[i],font)\n output.append(converted_font)\n end\n return output\nend",
"def type1_font_names\n if @options[:built_in_fonts]\n PdfK::FONT_NAMES\n else\n AFM::font_names\n end\n end",
"def initiate_library\n # do nothing if the library is already initiated\n return false if FONTS_LIBRARY.key? :Helvetica\n # font metrics objects to be used\n times_metrics = { \"\\u0000\" => { wx: 250, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 333, boundingbox: [130, -9, 238, 676] }, '\"' => { wx: 408, boundingbox: [77, 431, 331, 676] }, '#' => { wx: 500, boundingbox: [5, 0, 496, 662] }, '$' => { wx: 500, boundingbox: [44, -87, 457, 727] }, '%' => { wx: 833, boundingbox: [61, -13, 772, 676] }, '&' => { wx: 778, boundingbox: [42, -13, 750, 676] }, \"'\" => { wx: 333, boundingbox: [79, 433, 218, 676] }, '(' => { wx: 333, boundingbox: [48, -177, 304, 676] }, ')' => { wx: 333, boundingbox: [29, -177, 285, 676] }, '*' => { wx: 500, boundingbox: [69, 265, 432, 676] }, '+' => { wx: 564, boundingbox: [30, 0, 534, 506] }, ',' => { wx: 250, boundingbox: [56, -141, 195, 102] }, '-' => { wx: 333, boundingbox: [39, 194, 285, 257] }, '.' => { wx: 250, boundingbox: [70, -11, 181, 100] }, '/' => { wx: 278, boundingbox: [-9, -14, 287, 676] }, '0' => { wx: 500, boundingbox: [24, -14, 476, 676] }, '1' => { wx: 500, boundingbox: [111, 0, 394, 676] }, '2' => { wx: 500, boundingbox: [30, 0, 475, 676] }, '3' => { wx: 500, boundingbox: [43, -14, 431, 676] }, '4' => { wx: 500, boundingbox: [12, 0, 472, 676] }, '5' => { wx: 500, boundingbox: [32, -14, 438, 688] }, '6' => { wx: 500, boundingbox: [34, -14, 468, 684] }, '7' => { wx: 500, boundingbox: [20, -8, 449, 662] }, '8' => { wx: 500, boundingbox: [56, -14, 445, 676] }, '9' => { wx: 500, boundingbox: [30, -22, 459, 676] }, ':' => { wx: 278, boundingbox: [81, -11, 192, 459] }, ';' => { wx: 278, boundingbox: [80, -141, 219, 459] }, '<' => { wx: 564, boundingbox: [28, -8, 536, 514] }, '=' => { wx: 564, boundingbox: [30, 120, 534, 386] }, '>' => { wx: 564, boundingbox: [28, -8, 536, 514] }, '?' 
=> { wx: 444, boundingbox: [68, -8, 414, 676] }, '@' => { wx: 921, boundingbox: [116, -14, 809, 676] }, 'A' => { wx: 722, boundingbox: [15, 0, 706, 674] }, 'B' => { wx: 667, boundingbox: [17, 0, 593, 662] }, 'C' => { wx: 667, boundingbox: [28, -14, 633, 676] }, 'D' => { wx: 722, boundingbox: [16, 0, 685, 662] }, 'E' => { wx: 611, boundingbox: [12, 0, 597, 662] }, 'F' => { wx: 556, boundingbox: [12, 0, 546, 662] }, 'G' => { wx: 722, boundingbox: [32, -14, 709, 676] }, 'H' => { wx: 722, boundingbox: [19, 0, 702, 662] }, 'I' => { wx: 333, boundingbox: [18, 0, 315, 662] }, 'J' => { wx: 389, boundingbox: [10, -14, 370, 662] }, 'K' => { wx: 722, boundingbox: [34, 0, 723, 662] }, 'L' => { wx: 611, boundingbox: [12, 0, 598, 662] }, 'M' => { wx: 889, boundingbox: [12, 0, 863, 662] }, 'N' => { wx: 722, boundingbox: [12, -11, 707, 662] }, 'O' => { wx: 722, boundingbox: [34, -14, 688, 676] }, 'P' => { wx: 556, boundingbox: [16, 0, 542, 662] }, 'Q' => { wx: 722, boundingbox: [34, -178, 701, 676] }, 'R' => { wx: 667, boundingbox: [17, 0, 659, 662] }, 'S' => { wx: 556, boundingbox: [42, -14, 491, 676] }, 'T' => { wx: 611, boundingbox: [17, 0, 593, 662] }, 'U' => { wx: 722, boundingbox: [14, -14, 705, 662] }, 'V' => { wx: 722, boundingbox: [16, -11, 697, 662] }, 'W' => { wx: 944, boundingbox: [5, -11, 932, 662] }, 'X' => { wx: 722, boundingbox: [10, 0, 704, 662] }, 'Y' => { wx: 722, boundingbox: [22, 0, 703, 662] }, 'Z' => { wx: 611, boundingbox: [9, 0, 597, 662] }, '[' => { wx: 333, boundingbox: [88, -156, 299, 662] }, '\\\\' => { wx: 278, boundingbox: [-9, -14, 287, 676] }, ']' => { wx: 333, boundingbox: [34, -156, 245, 662] }, '^' => { wx: 469, boundingbox: [24, 297, 446, 662] }, '_' => { wx: 500, boundingbox: [0, -125, 500, -75] }, '`' => { wx: 333, boundingbox: [115, 433, 254, 676] }, 'a' => { wx: 444, boundingbox: [37, -10, 442, 460] }, 'b' => { wx: 500, boundingbox: [3, -10, 468, 683] }, 'c' => { wx: 444, boundingbox: [25, -10, 412, 460] }, 'd' => { wx: 500, boundingbox: [27, -10, 491, 683] }, 'e' => { wx: 444, boundingbox: [25, -10, 424, 460] }, 'f' => { wx: 333, boundingbox: [20, 0, 383, 683] }, 'g' => { wx: 500, boundingbox: [28, -218, 470, 460] }, 'h' => { wx: 500, boundingbox: [9, 0, 487, 683] }, 'i' => { wx: 278, boundingbox: [16, 0, 253, 683] }, 'j' => { wx: 278, boundingbox: [-70, -218, 194, 683] }, 'k' => { wx: 500, boundingbox: [7, 0, 505, 683] }, 'l' => { wx: 278, boundingbox: [19, 0, 257, 683] }, 'm' => { wx: 778, boundingbox: [16, 0, 775, 460] }, 'n' => { wx: 500, boundingbox: [16, 0, 485, 460] }, 'o' => { wx: 500, boundingbox: [29, -10, 470, 460] }, 'p' => { wx: 500, boundingbox: [5, -217, 470, 460] }, 'q' => { wx: 500, boundingbox: [24, -217, 488, 460] }, 'r' => { wx: 333, boundingbox: [5, 0, 335, 460] }, 's' => { wx: 389, boundingbox: [51, -10, 348, 460] }, 't' => { wx: 278, boundingbox: [13, -10, 279, 579] }, 'u' => { wx: 500, boundingbox: [9, -10, 479, 450] }, 'v' => { wx: 500, boundingbox: [19, -14, 477, 450] }, 'w' => { wx: 722, boundingbox: [21, -14, 694, 450] }, 'x' => { wx: 500, boundingbox: [17, 0, 479, 450] }, 'y' => { wx: 500, boundingbox: [14, -218, 475, 450] }, 'z' => { wx: 444, boundingbox: [27, 0, 418, 450] }, '{' => { wx: 480, boundingbox: [100, -181, 350, 680] }, '|' => { wx: 200, boundingbox: [67, -218, 133, 782] }, '}' => { wx: 480, boundingbox: [130, -181, 380, 680] }, '~' => { wx: 541, boundingbox: [40, 183, 502, 323] }, \"\\u00A1\" => { wx: 333, boundingbox: [97, -218, 205, 467] }, \"\\u00A2\" => { wx: 500, boundingbox: [53, -138, 448, 579] }, \"\\u00A3\" => { 
wx: 500, boundingbox: [12, -8, 490, 676] }, \"\\u00A4\" => { wx: 167, boundingbox: [-168, -14, 331, 676] }, \"\\u00A5\" => { wx: 500, boundingbox: [-53, 0, 512, 662] }, \"\\u00A6\" => { wx: 500, boundingbox: [7, -189, 490, 676] }, \"\\u00A7\" => { wx: 500, boundingbox: [70, -148, 426, 676] }, \"\\u00A8\" => { wx: 500, boundingbox: [-22, 58, 522, 602] }, \"\\u00A9\" => { wx: 180, boundingbox: [48, 431, 133, 676] }, \"\\u00AA\" => { wx: 444, boundingbox: [43, 433, 414, 676] }, \"\\u00AB\" => { wx: 500, boundingbox: [42, 33, 456, 416] }, \"\\u00AC\" => { wx: 333, boundingbox: [63, 33, 285, 416] }, \"\\u00AD\" => { wx: 333, boundingbox: [48, 33, 270, 416] }, \"\\u00AE\" => { wx: 556, boundingbox: [31, 0, 521, 683] }, \"\\u00AF\" => { wx: 556, boundingbox: [32, 0, 521, 683] }, \"\\u00B1\" => { wx: 500, boundingbox: [0, 201, 500, 250] }, \"\\u00B2\" => { wx: 500, boundingbox: [59, -149, 442, 676] }, \"\\u00B3\" => { wx: 500, boundingbox: [58, -153, 442, 676] }, \"\\u00B4\" => { wx: 250, boundingbox: [70, 199, 181, 310] }, \"\\u00B6\" => { wx: 453, boundingbox: [-22, -154, 450, 662] }, \"\\u00B7\" => { wx: 350, boundingbox: [40, 196, 310, 466] }, \"\\u00B8\" => { wx: 333, boundingbox: [79, -141, 218, 102] }, \"\\u00B9\" => { wx: 444, boundingbox: [45, -141, 416, 102] }, \"\\u00BA\" => { wx: 444, boundingbox: [30, 433, 401, 676] }, \"\\u00BB\" => { wx: 500, boundingbox: [44, 33, 458, 416] }, \"\\u00BC\" => { wx: 1000, boundingbox: [111, -11, 888, 100] }, \"\\u00BD\" => { wx: 1000, boundingbox: [7, -19, 994, 706] }, \"\\u00BF\" => { wx: 444, boundingbox: [30, -218, 376, 466] }, \"\\u00C1\" => { wx: 333, boundingbox: [19, 507, 242, 678] }, \"\\u00C2\" => { wx: 333, boundingbox: [93, 507, 317, 678] }, \"\\u00C3\" => { wx: 333, boundingbox: [11, 507, 322, 674] }, \"\\u00C4\" => { wx: 333, boundingbox: [1, 532, 331, 638] }, \"\\u00C5\" => { wx: 333, boundingbox: [11, 547, 322, 601] }, \"\\u00C6\" => { wx: 333, boundingbox: [26, 507, 307, 664] }, \"\\u00C7\" => { wx: 333, boundingbox: [118, 581, 216, 681] }, \"\\u00C8\" => { wx: 333, boundingbox: [18, 581, 315, 681] }, \"\\u00CA\" => { wx: 333, boundingbox: [67, 512, 266, 711] }, \"\\u00CB\" => { wx: 333, boundingbox: [52, -215, 261, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [-3, 507, 377, 678] }, \"\\u00CE\" => { wx: 333, boundingbox: [62, -165, 243, 0] }, \"\\u00CF\" => { wx: 333, boundingbox: [11, 507, 322, 674] }, \"\\u00D0\" => { wx: 1000, boundingbox: [0, 201, 1000, 250] }, \"\\u00E1\" => { wx: 889, boundingbox: [0, 0, 863, 662] }, \"\\u00E3\" => { wx: 276, boundingbox: [4, 394, 270, 676] }, \"\\u00E8\" => { wx: 611, boundingbox: [12, 0, 598, 662] }, \"\\u00E9\" => { wx: 722, boundingbox: [34, -80, 688, 734] }, \"\\u00EA\" => { wx: 889, boundingbox: [30, -6, 885, 668] }, \"\\u00EB\" => { wx: 310, boundingbox: [6, 394, 304, 676] }, \"\\u00F1\" => { wx: 667, boundingbox: [38, -10, 632, 460] }, \"\\u00F5\" => { wx: 278, boundingbox: [16, 0, 253, 460] }, \"\\u00F8\" => { wx: 278, boundingbox: [19, 0, 259, 683] }, \"\\u00F9\" => { wx: 500, boundingbox: [29, -112, 470, 551] }, \"\\u00FA\" => { wx: 722, boundingbox: [30, -10, 690, 460] }, \"\\u00FB\" => { wx: 500, boundingbox: [12, -9, 468, 683] }, \"\\xFF\" => { wx: 500, boundingbox: [0, 0, 0, 0] } }\n times_bold_metrics = { ' ' => { wx: 250, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 333, boundingbox: [81, -13, 251, 691] }, '\"' => { wx: 555, boundingbox: [83, 404, 472, 691] }, '#' => { wx: 500, boundingbox: [4, 0, 496, 700] }, '$' => { wx: 500, boundingbox: [29, -99, 472, 750] }, '%' => { wx: 1000, boundingbox: [124, -14, 877, 692] }, '&' => { wx: 833, boundingbox: [62, -16, 787, 691] }, \"'\" => { wx: 333, boundingbox: [79, 356, 263, 691] }, '(' => { wx: 333, boundingbox: [46, -168, 306, 694] }, ')' => { wx: 333, boundingbox: [27, -168, 287, 694] }, '*' => { wx: 500, boundingbox: [56, 255, 447, 691] }, '+' => { wx: 570, boundingbox: [33, 0, 537, 506] }, ',' => { wx: 250, boundingbox: [39, -180, 223, 155] }, '-' => { wx: 333, boundingbox: [44, 171, 287, 287] }, '.' => { wx: 250, boundingbox: [41, -13, 210, 156] }, '/' => { wx: 278, boundingbox: [-24, -19, 302, 691] }, '0' => { wx: 500, boundingbox: [24, -13, 476, 688] }, '1' => { wx: 500, boundingbox: [65, 0, 442, 688] }, '2' => { wx: 500, boundingbox: [17, 0, 478, 688] }, '3' => { wx: 500, boundingbox: [16, -14, 468, 688] }, '4' => { wx: 500, boundingbox: [19, 0, 475, 688] }, '5' => { wx: 500, boundingbox: [22, -8, 470, 676] }, '6' => { wx: 500, boundingbox: [28, -13, 475, 688] }, '7' => { wx: 500, boundingbox: [17, 0, 477, 676] }, '8' => { wx: 500, boundingbox: [28, -13, 472, 688] }, '9' => { wx: 500, boundingbox: [26, -13, 473, 688] }, ':' => { wx: 333, boundingbox: [82, -13, 251, 472] }, ';' => { wx: 333, boundingbox: [82, -180, 266, 472] }, '<' => { wx: 570, boundingbox: [31, -8, 539, 514] }, '=' => { wx: 570, boundingbox: [33, 107, 537, 399] }, '>' => { wx: 570, boundingbox: [31, -8, 539, 514] }, '?' => { wx: 500, boundingbox: [57, -13, 445, 689] }, '@' => { wx: 930, boundingbox: [108, -19, 822, 691] }, 'A' => { wx: 722, boundingbox: [9, 0, 689, 690] }, 'B' => { wx: 667, boundingbox: [16, 0, 619, 676] }, 'C' => { wx: 722, boundingbox: [49, -19, 687, 691] }, 'D' => { wx: 722, boundingbox: [14, 0, 690, 676] }, 'E' => { wx: 667, boundingbox: [16, 0, 641, 676] }, 'F' => { wx: 611, boundingbox: [16, 0, 583, 676] }, 'G' => { wx: 778, boundingbox: [37, -19, 755, 691] }, 'H' => { wx: 778, boundingbox: [21, 0, 759, 676] }, 'I' => { wx: 389, boundingbox: [20, 0, 370, 676] }, 'J' => { wx: 500, boundingbox: [3, -96, 479, 676] }, 'K' => { wx: 778, boundingbox: [30, 0, 769, 676] }, 'L' => { wx: 667, boundingbox: [19, 0, 638, 676] }, 'M' => { wx: 944, boundingbox: [14, 0, 921, 676] }, 'N' => { wx: 722, boundingbox: [16, -18, 701, 676] }, 'O' => { wx: 778, boundingbox: [35, -19, 743, 691] }, 'P' => { wx: 611, boundingbox: [16, 0, 600, 676] }, 'Q' => { wx: 778, boundingbox: [35, -176, 743, 691] }, 'R' => { wx: 722, boundingbox: [26, 0, 715, 676] }, 'S' => { wx: 556, boundingbox: [35, -19, 513, 692] }, 'T' => { wx: 667, boundingbox: [31, 0, 636, 676] }, 'U' => { wx: 722, boundingbox: [16, -19, 701, 676] }, 'V' => { wx: 722, boundingbox: [16, -18, 701, 676] }, 'W' => { wx: 1000, boundingbox: [19, -15, 981, 676] }, 'X' => { wx: 722, boundingbox: [16, 0, 699, 676] }, 'Y' => { wx: 722, boundingbox: [15, 0, 699, 676] }, 'Z' => { wx: 667, boundingbox: [28, 0, 634, 676] }, '[' => { wx: 333, boundingbox: [67, -149, 301, 678] }, '\\\\' => { wx: 278, boundingbox: [-25, -19, 303, 691] }, ']' => { wx: 333, boundingbox: [32, -149, 266, 678] }, '^' => { wx: 581, boundingbox: [73, 311, 509, 676] }, '_' => { wx: 500, boundingbox: [0, -125, 500, -75] }, '`' => { wx: 333, boundingbox: [70, 356, 254, 691] }, 'a' => { wx: 500, boundingbox: [25, -14, 488, 473] }, 'b' => { wx: 556, boundingbox: [17, -14, 521, 676] }, 'c' => { wx: 444, 
boundingbox: [25, -14, 430, 473] }, 'd' => { wx: 556, boundingbox: [25, -14, 534, 676] }, 'e' => { wx: 444, boundingbox: [25, -14, 426, 473] }, 'f' => { wx: 333, boundingbox: [14, 0, 389, 691] }, 'g' => { wx: 500, boundingbox: [28, -206, 483, 473] }, 'h' => { wx: 556, boundingbox: [16, 0, 534, 676] }, 'i' => { wx: 278, boundingbox: [16, 0, 255, 691] }, 'j' => { wx: 333, boundingbox: [-57, -203, 263, 691] }, 'k' => { wx: 556, boundingbox: [22, 0, 543, 676] }, 'l' => { wx: 278, boundingbox: [16, 0, 255, 676] }, 'm' => { wx: 833, boundingbox: [16, 0, 814, 473] }, 'n' => { wx: 556, boundingbox: [21, 0, 539, 473] }, 'o' => { wx: 500, boundingbox: [25, -14, 476, 473] }, 'p' => { wx: 556, boundingbox: [19, -205, 524, 473] }, 'q' => { wx: 556, boundingbox: [34, -205, 536, 473] }, 'r' => { wx: 444, boundingbox: [29, 0, 434, 473] }, 's' => { wx: 389, boundingbox: [25, -14, 361, 473] }, 't' => { wx: 333, boundingbox: [20, -12, 332, 630] }, 'u' => { wx: 556, boundingbox: [16, -14, 537, 461] }, 'v' => { wx: 500, boundingbox: [21, -14, 485, 461] }, 'w' => { wx: 722, boundingbox: [23, -14, 707, 461] }, 'x' => { wx: 500, boundingbox: [12, 0, 484, 461] }, 'y' => { wx: 500, boundingbox: [16, -205, 480, 461] }, 'z' => { wx: 444, boundingbox: [21, 0, 420, 461] }, '{' => { wx: 394, boundingbox: [22, -175, 340, 698] }, '|' => { wx: 220, boundingbox: [66, -218, 154, 782] }, '}' => { wx: 394, boundingbox: [54, -175, 372, 698] }, '~' => { wx: 520, boundingbox: [29, 173, 491, 333] }, \"\\u00A1\" => { wx: 333, boundingbox: [82, -203, 252, 501] }, \"\\u00A2\" => { wx: 500, boundingbox: [53, -140, 458, 588] }, \"\\u00A3\" => { wx: 500, boundingbox: [21, -14, 477, 684] }, \"\\u00A4\" => { wx: 167, boundingbox: [-168, -12, 329, 688] }, \"\\u00A5\" => { wx: 500, boundingbox: [-64, 0, 547, 676] }, \"\\u00A6\" => { wx: 500, boundingbox: [0, -155, 498, 706] }, \"\\u00A7\" => { wx: 500, boundingbox: [57, -132, 443, 691] }, \"\\u00A8\" => { wx: 500, boundingbox: [-26, 61, 526, 613] }, \"\\u00A9\" => { wx: 278, boundingbox: [75, 404, 204, 691] }, \"\\u00AA\" => { wx: 500, boundingbox: [32, 356, 486, 691] }, \"\\u00AB\" => { wx: 500, boundingbox: [23, 36, 473, 415] }, \"\\u00AC\" => { wx: 333, boundingbox: [51, 36, 305, 415] }, \"\\u00AD\" => { wx: 333, boundingbox: [28, 36, 282, 415] }, \"\\u00AE\" => { wx: 556, boundingbox: [14, 0, 536, 691] }, \"\\u00AF\" => { wx: 556, boundingbox: [14, 0, 536, 691] }, \"\\u00B1\" => { wx: 500, boundingbox: [0, 181, 500, 271] }, \"\\u00B2\" => { wx: 500, boundingbox: [47, -134, 453, 691] }, \"\\u00B3\" => { wx: 500, boundingbox: [45, -132, 456, 691] }, \"\\u00B4\" => { wx: 250, boundingbox: [41, 248, 210, 417] }, \"\\u00B6\" => { wx: 540, boundingbox: [0, -186, 519, 676] }, \"\\u00B7\" => { wx: 350, boundingbox: [35, 198, 315, 478] }, \"\\u00B8\" => { wx: 333, boundingbox: [79, -180, 263, 155] }, \"\\u00B9\" => { wx: 500, boundingbox: [14, -180, 468, 155] }, \"\\u00BA\" => { wx: 500, boundingbox: [14, 356, 468, 691] }, \"\\u00BB\" => { wx: 500, boundingbox: [27, 36, 477, 415] }, \"\\u00BC\" => { wx: 1000, boundingbox: [82, -13, 917, 156] }, \"\\u00BD\" => { wx: 1000, boundingbox: [7, -29, 995, 706] }, \"\\u00BF\" => { wx: 500, boundingbox: [55, -201, 443, 501] }, \"\\u00C1\" => { wx: 333, boundingbox: [8, 528, 246, 713] }, \"\\u00C2\" => { wx: 333, boundingbox: [86, 528, 324, 713] }, \"\\u00C3\" => { wx: 333, boundingbox: [-2, 528, 335, 704] }, \"\\u00C4\" => { wx: 333, boundingbox: [-16, 547, 349, 674] }, \"\\u00C5\" => { wx: 333, boundingbox: [1, 565, 331, 637] }, \"\\u00C6\" => { wx: 333, 
boundingbox: [15, 528, 318, 691] }, \"\\u00C7\" => { wx: 333, boundingbox: [103, 536, 258, 691] }, \"\\u00C8\" => { wx: 333, boundingbox: [-2, 537, 335, 667] }, \"\\u00CA\" => { wx: 333, boundingbox: [60, 527, 273, 740] }, \"\\u00CB\" => { wx: 333, boundingbox: [68, -218, 294, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [-13, 528, 425, 713] }, \"\\u00CE\" => { wx: 333, boundingbox: [90, -193, 319, 24] }, \"\\u00CF\" => { wx: 333, boundingbox: [-2, 528, 335, 704] }, \"\\u00D0\" => { wx: 1000, boundingbox: [0, 181, 1000, 271] }, \"\\u00E1\" => { wx: 1000, boundingbox: [4, 0, 951, 676] }, \"\\u00E3\" => { wx: 300, boundingbox: [-1, 397, 301, 688] }, \"\\u00E8\" => { wx: 667, boundingbox: [19, 0, 638, 676] }, \"\\u00E9\" => { wx: 778, boundingbox: [35, -74, 743, 737] }, \"\\u00EA\" => { wx: 1000, boundingbox: [22, -5, 981, 684] }, \"\\u00EB\" => { wx: 330, boundingbox: [18, 397, 312, 688] }, \"\\u00F1\" => { wx: 722, boundingbox: [33, -14, 693, 473] }, \"\\u00F5\" => { wx: 278, boundingbox: [16, 0, 255, 461] }, \"\\u00F8\" => { wx: 278, boundingbox: [-22, 0, 303, 676] }, \"\\u00F9\" => { wx: 500, boundingbox: [25, -92, 476, 549] }, \"\\u00FA\" => { wx: 722, boundingbox: [22, -14, 696, 473] }, \"\\u00FB\" => { wx: 556, boundingbox: [19, -12, 517, 691] }, \"\\xFF\" => { wx: 500, boundingbox: [0, 0, 0, 0] } }\n times_italic_metrics = { ' ' => { wx: 250, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 333, boundingbox: [39, -11, 302, 667] }, '\"' => { wx: 420, boundingbox: [144, 421, 432, 666] }, '#' => { wx: 500, boundingbox: [2, 0, 540, 676] }, '$' => { wx: 500, boundingbox: [31, -89, 497, 731] }, '%' => { wx: 833, boundingbox: [79, -13, 790, 676] }, '&' => { wx: 778, boundingbox: [76, -18, 723, 666] }, \"'\" => { wx: 333, boundingbox: [151, 436, 290, 666] }, '(' => { wx: 333, boundingbox: [42, -181, 315, 669] }, ')' => { wx: 333, boundingbox: [16, -180, 289, 669] }, '*' => { wx: 500, boundingbox: [128, 255, 492, 666] }, '+' => { wx: 675, boundingbox: [86, 0, 590, 506] }, ',' => { wx: 250, boundingbox: [-4, -129, 135, 101] }, '-' => { wx: 333, boundingbox: [49, 192, 282, 255] }, '.' => { wx: 250, boundingbox: [27, -11, 138, 100] }, '/' => { wx: 278, boundingbox: [-65, -18, 386, 666] }, '0' => { wx: 500, boundingbox: [32, -7, 497, 676] }, '1' => { wx: 500, boundingbox: [49, 0, 409, 676] }, '2' => { wx: 500, boundingbox: [12, 0, 452, 676] }, '3' => { wx: 500, boundingbox: [15, -7, 465, 676] }, '4' => { wx: 500, boundingbox: [1, 0, 479, 676] }, '5' => { wx: 500, boundingbox: [15, -7, 491, 666] }, '6' => { wx: 500, boundingbox: [30, -7, 521, 686] }, '7' => { wx: 500, boundingbox: [75, -8, 537, 666] }, '8' => { wx: 500, boundingbox: [30, -7, 493, 676] }, '9' => { wx: 500, boundingbox: [23, -17, 492, 676] }, ':' => { wx: 333, boundingbox: [50, -11, 261, 441] }, ';' => { wx: 333, boundingbox: [27, -129, 261, 441] }, '<' => { wx: 675, boundingbox: [84, -8, 592, 514] }, '=' => { wx: 675, boundingbox: [86, 120, 590, 386] }, '>' => { wx: 675, boundingbox: [84, -8, 592, 514] }, '?' 
=> { wx: 500, boundingbox: [132, -12, 472, 664] }, '@' => { wx: 920, boundingbox: [118, -18, 806, 666] }, 'A' => { wx: 611, boundingbox: [-51, 0, 564, 668] }, 'B' => { wx: 611, boundingbox: [-8, 0, 588, 653] }, 'C' => { wx: 667, boundingbox: [66, -18, 689, 666] }, 'D' => { wx: 722, boundingbox: [-8, 0, 700, 653] }, 'E' => { wx: 611, boundingbox: [-1, 0, 634, 653] }, 'F' => { wx: 611, boundingbox: [8, 0, 645, 653] }, 'G' => { wx: 722, boundingbox: [52, -18, 722, 666] }, 'H' => { wx: 722, boundingbox: [-8, 0, 767, 653] }, 'I' => { wx: 333, boundingbox: [-8, 0, 384, 653] }, 'J' => { wx: 444, boundingbox: [-6, -18, 491, 653] }, 'K' => { wx: 667, boundingbox: [7, 0, 722, 653] }, 'L' => { wx: 556, boundingbox: [-8, 0, 559, 653] }, 'M' => { wx: 833, boundingbox: [-18, 0, 873, 653] }, 'N' => { wx: 667, boundingbox: [-20, -15, 727, 653] }, 'O' => { wx: 722, boundingbox: [60, -18, 699, 666] }, 'P' => { wx: 611, boundingbox: [0, 0, 605, 653] }, 'Q' => { wx: 722, boundingbox: [59, -182, 699, 666] }, 'R' => { wx: 611, boundingbox: [-13, 0, 588, 653] }, 'S' => { wx: 500, boundingbox: [17, -18, 508, 667] }, 'T' => { wx: 556, boundingbox: [59, 0, 633, 653] }, 'U' => { wx: 722, boundingbox: [102, -18, 765, 653] }, 'V' => { wx: 611, boundingbox: [76, -18, 688, 653] }, 'W' => { wx: 833, boundingbox: [71, -18, 906, 653] }, 'X' => { wx: 611, boundingbox: [-29, 0, 655, 653] }, 'Y' => { wx: 556, boundingbox: [78, 0, 633, 653] }, 'Z' => { wx: 556, boundingbox: [-6, 0, 606, 653] }, '[' => { wx: 389, boundingbox: [21, -153, 391, 663] }, '\\\\' => { wx: 278, boundingbox: [-41, -18, 319, 666] }, ']' => { wx: 389, boundingbox: [12, -153, 382, 663] }, '^' => { wx: 422, boundingbox: [0, 301, 422, 666] }, '_' => { wx: 500, boundingbox: [0, -125, 500, -75] }, '`' => { wx: 333, boundingbox: [171, 436, 310, 666] }, 'a' => { wx: 500, boundingbox: [17, -11, 476, 441] }, 'b' => { wx: 500, boundingbox: [23, -11, 473, 683] }, 'c' => { wx: 444, boundingbox: [30, -11, 425, 441] }, 'd' => { wx: 500, boundingbox: [15, -13, 527, 683] }, 'e' => { wx: 444, boundingbox: [31, -11, 412, 441] }, 'f' => { wx: 278, boundingbox: [-147, -207, 424, 678] }, 'g' => { wx: 500, boundingbox: [8, -206, 472, 441] }, 'h' => { wx: 500, boundingbox: [19, -9, 478, 683] }, 'i' => { wx: 278, boundingbox: [49, -11, 264, 654] }, 'j' => { wx: 278, boundingbox: [-124, -207, 276, 654] }, 'k' => { wx: 444, boundingbox: [14, -11, 461, 683] }, 'l' => { wx: 278, boundingbox: [41, -11, 279, 683] }, 'm' => { wx: 722, boundingbox: [12, -9, 704, 441] }, 'n' => { wx: 500, boundingbox: [14, -9, 474, 441] }, 'o' => { wx: 500, boundingbox: [27, -11, 468, 441] }, 'p' => { wx: 500, boundingbox: [-75, -205, 469, 441] }, 'q' => { wx: 500, boundingbox: [25, -209, 483, 441] }, 'r' => { wx: 389, boundingbox: [45, 0, 412, 441] }, 's' => { wx: 389, boundingbox: [16, -13, 366, 442] }, 't' => { wx: 278, boundingbox: [37, -11, 296, 546] }, 'u' => { wx: 500, boundingbox: [42, -11, 475, 441] }, 'v' => { wx: 444, boundingbox: [21, -18, 426, 441] }, 'w' => { wx: 667, boundingbox: [16, -18, 648, 441] }, 'x' => { wx: 444, boundingbox: [-27, -11, 447, 441] }, 'y' => { wx: 444, boundingbox: [-24, -206, 426, 441] }, 'z' => { wx: 389, boundingbox: [-2, -81, 380, 428] }, '{' => { wx: 400, boundingbox: [51, -177, 407, 687] }, '|' => { wx: 275, boundingbox: [105, -217, 171, 783] }, '}' => { wx: 400, boundingbox: [-7, -177, 349, 687] }, '~' => { wx: 541, boundingbox: [40, 183, 502, 323] }, \"\\u00A1\" => { wx: 389, boundingbox: [59, -205, 322, 473] }, \"\\u00A2\" => { wx: 500, boundingbox: [77, 
-143, 472, 560] }, \"\\u00A3\" => { wx: 500, boundingbox: [10, -6, 517, 670] }, \"\\u00A4\" => { wx: 167, boundingbox: [-169, -10, 337, 676] }, \"\\u00A5\" => { wx: 500, boundingbox: [27, 0, 603, 653] }, \"\\u00A6\" => { wx: 500, boundingbox: [25, -182, 507, 682] }, \"\\u00A7\" => { wx: 500, boundingbox: [53, -162, 461, 666] }, \"\\u00A8\" => { wx: 500, boundingbox: [-22, 53, 522, 597] }, \"\\u00A9\" => { wx: 214, boundingbox: [132, 421, 241, 666] }, \"\\u00AA\" => { wx: 556, boundingbox: [166, 436, 514, 666] }, \"\\u00AB\" => { wx: 500, boundingbox: [53, 37, 445, 403] }, \"\\u00AC\" => { wx: 333, boundingbox: [51, 37, 281, 403] }, \"\\u00AD\" => { wx: 333, boundingbox: [52, 37, 282, 403] }, \"\\u00AE\" => { wx: 500, boundingbox: [-141, -207, 481, 681] }, \"\\u00AF\" => { wx: 500, boundingbox: [-141, -204, 518, 682] }, \"\\u00B1\" => { wx: 500, boundingbox: [-6, 197, 505, 243] }, \"\\u00B2\" => { wx: 500, boundingbox: [101, -159, 488, 666] }, \"\\u00B3\" => { wx: 500, boundingbox: [22, -143, 491, 666] }, \"\\u00B4\" => { wx: 250, boundingbox: [70, 199, 181, 310] }, \"\\u00B6\" => { wx: 523, boundingbox: [55, -123, 616, 653] }, \"\\u00B7\" => { wx: 350, boundingbox: [40, 191, 310, 461] }, \"\\u00B8\" => { wx: 333, boundingbox: [44, -129, 183, 101] }, \"\\u00B9\" => { wx: 556, boundingbox: [57, -129, 405, 101] }, \"\\u00BA\" => { wx: 556, boundingbox: [151, 436, 499, 666] }, \"\\u00BB\" => { wx: 500, boundingbox: [55, 37, 447, 403] }, \"\\u00BC\" => { wx: 889, boundingbox: [57, -11, 762, 100] }, \"\\u00BD\" => { wx: 1000, boundingbox: [25, -19, 1010, 706] }, \"\\u00BF\" => { wx: 500, boundingbox: [28, -205, 368, 471] }, \"\\u00C1\" => { wx: 333, boundingbox: [121, 492, 311, 664] }, \"\\u00C2\" => { wx: 333, boundingbox: [180, 494, 403, 664] }, \"\\u00C3\" => { wx: 333, boundingbox: [91, 492, 385, 661] }, \"\\u00C4\" => { wx: 333, boundingbox: [100, 517, 427, 624] }, \"\\u00C5\" => { wx: 333, boundingbox: [99, 532, 411, 583] }, \"\\u00C6\" => { wx: 333, boundingbox: [117, 492, 418, 650] }, \"\\u00C7\" => { wx: 333, boundingbox: [207, 548, 305, 646] }, \"\\u00C8\" => { wx: 333, boundingbox: [107, 548, 405, 646] }, \"\\u00CA\" => { wx: 333, boundingbox: [155, 492, 355, 691] }, \"\\u00CB\" => { wx: 333, boundingbox: [-30, -217, 182, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [93, 494, 486, 664] }, \"\\u00CE\" => { wx: 333, boundingbox: [20, -169, 203, 40] }, \"\\u00CF\" => { wx: 333, boundingbox: [121, 492, 426, 661] }, \"\\u00D0\" => { wx: 889, boundingbox: [-6, 197, 894, 243] }, \"\\u00E1\" => { wx: 889, boundingbox: [-27, 0, 911, 653] }, \"\\u00E3\" => { wx: 276, boundingbox: [42, 406, 352, 676] }, \"\\u00E8\" => { wx: 556, boundingbox: [-8, 0, 559, 653] }, \"\\u00E9\" => { wx: 722, boundingbox: [60, -105, 699, 722] }, \"\\u00EA\" => { wx: 944, boundingbox: [49, -8, 964, 666] }, \"\\u00EB\" => { wx: 310, boundingbox: [67, 406, 362, 676] }, \"\\u00F1\" => { wx: 667, boundingbox: [23, -11, 640, 441] }, \"\\u00F5\" => { wx: 278, boundingbox: [49, -11, 235, 441] }, \"\\u00F8\" => { wx: 278, boundingbox: [41, -11, 312, 683] }, \"\\u00F9\" => { wx: 500, boundingbox: [28, -135, 469, 554] }, \"\\u00FA\" => { wx: 667, boundingbox: [20, -12, 646, 441] }, \"\\u00FB\" => { wx: 500, boundingbox: [-168, -207, 493, 679] }, \"\\xFF\" => { wx: 500, boundingbox: [0, 0, 0, 0] } }\n times_bolditalic_metrics = { ' ' => { wx: 250, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 389, boundingbox: [67, -13, 370, 684] }, '\"' => { wx: 555, boundingbox: [136, 398, 536, 685] }, '#' => { wx: 500, boundingbox: [-33, 0, 533, 700] }, '$' => { wx: 500, boundingbox: [-20, -100, 497, 733] }, '%' => { wx: 833, boundingbox: [39, -10, 793, 692] }, '&' => { wx: 778, boundingbox: [5, -19, 699, 682] }, \"'\" => { wx: 333, boundingbox: [98, 369, 302, 685] }, '(' => { wx: 333, boundingbox: [28, -179, 344, 685] }, ')' => { wx: 333, boundingbox: [-44, -179, 271, 685] }, '*' => { wx: 500, boundingbox: [65, 249, 456, 685] }, '+' => { wx: 570, boundingbox: [33, 0, 537, 506] }, ',' => { wx: 250, boundingbox: [-60, -182, 144, 134] }, '-' => { wx: 333, boundingbox: [2, 166, 271, 282] }, '.' => { wx: 250, boundingbox: [-9, -13, 139, 135] }, '/' => { wx: 278, boundingbox: [-64, -18, 342, 685] }, '0' => { wx: 500, boundingbox: [17, -14, 477, 683] }, '1' => { wx: 500, boundingbox: [5, 0, 419, 683] }, '2' => { wx: 500, boundingbox: [-27, 0, 446, 683] }, '3' => { wx: 500, boundingbox: [-15, -13, 450, 683] }, '4' => { wx: 500, boundingbox: [-15, 0, 503, 683] }, '5' => { wx: 500, boundingbox: [-11, -13, 487, 669] }, '6' => { wx: 500, boundingbox: [23, -15, 509, 679] }, '7' => { wx: 500, boundingbox: [52, 0, 525, 669] }, '8' => { wx: 500, boundingbox: [3, -13, 476, 683] }, '9' => { wx: 500, boundingbox: [-12, -10, 475, 683] }, ':' => { wx: 333, boundingbox: [23, -13, 264, 459] }, ';' => { wx: 333, boundingbox: [-25, -183, 264, 459] }, '<' => { wx: 570, boundingbox: [31, -8, 539, 514] }, '=' => { wx: 570, boundingbox: [33, 107, 537, 399] }, '>' => { wx: 570, boundingbox: [31, -8, 539, 514] }, '?' => { wx: 500, boundingbox: [79, -13, 470, 684] }, '@' => { wx: 832, boundingbox: [63, -18, 770, 685] }, 'A' => { wx: 667, boundingbox: [-67, 0, 593, 683] }, 'B' => { wx: 667, boundingbox: [-24, 0, 624, 669] }, 'C' => { wx: 667, boundingbox: [32, -18, 677, 685] }, 'D' => { wx: 722, boundingbox: [-46, 0, 685, 669] }, 'E' => { wx: 667, boundingbox: [-27, 0, 653, 669] }, 'F' => { wx: 667, boundingbox: [-13, 0, 660, 669] }, 'G' => { wx: 722, boundingbox: [21, -18, 706, 685] }, 'H' => { wx: 778, boundingbox: [-24, 0, 799, 669] }, 'I' => { wx: 389, boundingbox: [-32, 0, 406, 669] }, 'J' => { wx: 500, boundingbox: [-46, -99, 524, 669] }, 'K' => { wx: 667, boundingbox: [-21, 0, 702, 669] }, 'L' => { wx: 611, boundingbox: [-22, 0, 590, 669] }, 'M' => { wx: 889, boundingbox: [-29, -12, 917, 669] }, 'N' => { wx: 722, boundingbox: [-27, -15, 748, 669] }, 'O' => { wx: 722, boundingbox: [27, -18, 691, 685] }, 'P' => { wx: 611, boundingbox: [-27, 0, 613, 669] }, 'Q' => { wx: 722, boundingbox: [27, -208, 691, 685] }, 'R' => { wx: 667, boundingbox: [-29, 0, 623, 669] }, 'S' => { wx: 556, boundingbox: [2, -18, 526, 685] }, 'T' => { wx: 611, boundingbox: [50, 0, 650, 669] }, 'U' => { wx: 722, boundingbox: [67, -18, 744, 669] }, 'V' => { wx: 667, boundingbox: [65, -18, 715, 669] }, 'W' => { wx: 889, boundingbox: [65, -18, 940, 669] }, 'X' => { wx: 667, boundingbox: [-24, 0, 694, 669] }, 'Y' => { wx: 611, boundingbox: [73, 0, 659, 669] }, 'Z' => { wx: 611, boundingbox: [-11, 0, 590, 669] }, '[' => { wx: 333, boundingbox: [-37, -159, 362, 674] }, '\\\\' => { wx: 278, boundingbox: [-1, -18, 279, 685] }, ']' => { wx: 333, boundingbox: [-56, -157, 343, 674] }, '^' => { wx: 570, boundingbox: [67, 304, 503, 669] }, '_' => { wx: 500, boundingbox: [0, -125, 500, -75] }, '`' => { wx: 333, boundingbox: [128, 369, 332, 685] }, 'a' => { wx: 500, boundingbox: [-21, -14, 455, 462] }, 'b' => { wx: 500, boundingbox: [-14, -13, 444, 699] 
}, 'c' => { wx: 444, boundingbox: [-5, -13, 392, 462] }, 'd' => { wx: 500, boundingbox: [-21, -13, 517, 699] }, 'e' => { wx: 444, boundingbox: [5, -13, 398, 462] }, 'f' => { wx: 333, boundingbox: [-169, -205, 446, 698] }, 'g' => { wx: 500, boundingbox: [-52, -203, 478, 462] }, 'h' => { wx: 556, boundingbox: [-13, -9, 498, 699] }, 'i' => { wx: 278, boundingbox: [2, -9, 263, 684] }, 'j' => { wx: 278, boundingbox: [-189, -207, 279, 684] }, 'k' => { wx: 500, boundingbox: [-23, -8, 483, 699] }, 'l' => { wx: 278, boundingbox: [2, -9, 290, 699] }, 'm' => { wx: 778, boundingbox: [-14, -9, 722, 462] }, 'n' => { wx: 556, boundingbox: [-6, -9, 493, 462] }, 'o' => { wx: 500, boundingbox: [-3, -13, 441, 462] }, 'p' => { wx: 500, boundingbox: [-120, -205, 446, 462] }, 'q' => { wx: 500, boundingbox: [1, -205, 471, 462] }, 'r' => { wx: 389, boundingbox: [-21, 0, 389, 462] }, 's' => { wx: 389, boundingbox: [-19, -13, 333, 462] }, 't' => { wx: 278, boundingbox: [-11, -9, 281, 594] }, 'u' => { wx: 556, boundingbox: [15, -9, 492, 462] }, 'v' => { wx: 444, boundingbox: [16, -13, 401, 462] }, 'w' => { wx: 667, boundingbox: [16, -13, 614, 462] }, 'x' => { wx: 500, boundingbox: [-46, -13, 469, 462] }, 'y' => { wx: 444, boundingbox: [-94, -205, 392, 462] }, 'z' => { wx: 389, boundingbox: [-43, -78, 368, 449] }, '{' => { wx: 348, boundingbox: [5, -187, 436, 686] }, '|' => { wx: 220, boundingbox: [66, -218, 154, 782] }, '}' => { wx: 348, boundingbox: [-129, -187, 302, 686] }, '~' => { wx: 570, boundingbox: [54, 173, 516, 333] }, \"\\u00A1\" => { wx: 389, boundingbox: [19, -205, 322, 492] }, \"\\u00A2\" => { wx: 500, boundingbox: [42, -143, 439, 576] }, \"\\u00A3\" => { wx: 500, boundingbox: [-32, -12, 510, 683] }, \"\\u00A4\" => { wx: 167, boundingbox: [-169, -14, 324, 683] }, \"\\u00A5\" => { wx: 500, boundingbox: [33, 0, 628, 669] }, \"\\u00A6\" => { wx: 500, boundingbox: [-87, -156, 537, 707] }, \"\\u00A7\" => { wx: 500, boundingbox: [36, -143, 459, 685] }, \"\\u00A8\" => { wx: 500, boundingbox: [-26, 34, 526, 586] }, \"\\u00A9\" => { wx: 278, boundingbox: [128, 398, 268, 685] }, \"\\u00AA\" => { wx: 500, boundingbox: [53, 369, 513, 685] }, \"\\u00AB\" => { wx: 500, boundingbox: [12, 32, 468, 415] }, \"\\u00AC\" => { wx: 333, boundingbox: [32, 32, 303, 415] }, \"\\u00AD\" => { wx: 333, boundingbox: [10, 32, 281, 415] }, \"\\u00AE\" => { wx: 556, boundingbox: [-188, -205, 514, 703] }, \"\\u00AF\" => { wx: 556, boundingbox: [-186, -205, 553, 704] }, \"\\u00B1\" => { wx: 500, boundingbox: [-40, 178, 477, 269] }, \"\\u00B2\" => { wx: 500, boundingbox: [91, -145, 494, 685] }, \"\\u00B3\" => { wx: 500, boundingbox: [10, -139, 493, 685] }, \"\\u00B4\" => { wx: 250, boundingbox: [51, 257, 199, 405] }, \"\\u00B6\" => { wx: 500, boundingbox: [-57, -193, 562, 669] }, \"\\u00B7\" => { wx: 350, boundingbox: [0, 175, 350, 525] }, \"\\u00B8\" => { wx: 333, boundingbox: [-5, -182, 199, 134] }, \"\\u00B9\" => { wx: 500, boundingbox: [-57, -182, 403, 134] }, \"\\u00BA\" => { wx: 500, boundingbox: [53, 369, 513, 685] }, \"\\u00BB\" => { wx: 500, boundingbox: [12, 32, 468, 415] }, \"\\u00BC\" => { wx: 1000, boundingbox: [40, -13, 852, 135] }, \"\\u00BD\" => { wx: 1000, boundingbox: [7, -29, 996, 706] }, \"\\u00BF\" => { wx: 500, boundingbox: [30, -205, 421, 492] }, \"\\u00C1\" => { wx: 333, boundingbox: [85, 516, 297, 697] }, \"\\u00C2\" => { wx: 333, boundingbox: [139, 516, 379, 697] }, \"\\u00C3\" => { wx: 333, boundingbox: [40, 516, 367, 690] }, \"\\u00C4\" => { wx: 333, boundingbox: [48, 536, 407, 655] }, \"\\u00C5\" => { wx: 
333, boundingbox: [51, 553, 393, 623] }, \"\\u00C6\" => { wx: 333, boundingbox: [71, 516, 387, 678] }, \"\\u00C7\" => { wx: 333, boundingbox: [163, 550, 298, 684] }, \"\\u00C8\" => { wx: 333, boundingbox: [55, 550, 402, 684] }, \"\\u00CA\" => { wx: 333, boundingbox: [127, 516, 340, 729] }, \"\\u00CB\" => { wx: 333, boundingbox: [-80, -218, 156, 5] }, \"\\u00CD\" => { wx: 333, boundingbox: [69, 516, 498, 697] }, \"\\u00CE\" => { wx: 333, boundingbox: [15, -183, 244, 34] }, \"\\u00CF\" => { wx: 333, boundingbox: [79, 516, 411, 690] }, \"\\u00D0\" => { wx: 1000, boundingbox: [-40, 178, 977, 269] }, \"\\u00E1\" => { wx: 944, boundingbox: [-64, 0, 918, 669] }, \"\\u00E3\" => { wx: 266, boundingbox: [16, 399, 330, 685] }, \"\\u00E8\" => { wx: 611, boundingbox: [-22, 0, 590, 669] }, \"\\u00E9\" => { wx: 722, boundingbox: [27, -125, 691, 764] }, \"\\u00EA\" => { wx: 944, boundingbox: [23, -8, 946, 677] }, \"\\u00EB\" => { wx: 300, boundingbox: [56, 400, 347, 685] }, \"\\u00F1\" => { wx: 722, boundingbox: [-5, -13, 673, 462] }, \"\\u00F5\" => { wx: 278, boundingbox: [2, -9, 238, 462] }, \"\\u00F8\" => { wx: 278, boundingbox: [-7, -9, 307, 699] }, \"\\u00F9\" => { wx: 500, boundingbox: [-3, -119, 441, 560] }, \"\\u00FA\" => { wx: 722, boundingbox: [6, -13, 674, 462] }, \"\\u00FB\" => { wx: 500, boundingbox: [-200, -200, 473, 705] }, \"\\xFF\" => { wx: 500, boundingbox: [0, 0, 0, 0] } }\n helvetica_metrics = { ' ' => { wx: 278, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 278, boundingbox: [90, 0, 187, 718] }, '\"' => { wx: 355, boundingbox: [70, 463, 285, 718] }, '#' => { wx: 556, boundingbox: [28, 0, 529, 688] }, '$' => { wx: 556, boundingbox: [32, -115, 520, 775] }, '%' => { wx: 889, boundingbox: [39, -19, 850, 703] }, '&' => { wx: 667, boundingbox: [44, -15, 645, 718] }, \"'\" => { wx: 222, boundingbox: [53, 463, 157, 718] }, '(' => { wx: 333, boundingbox: [68, -207, 299, 733] }, ')' => { wx: 333, boundingbox: [34, -207, 265, 733] }, '*' => { wx: 389, boundingbox: [39, 431, 349, 718] }, '+' => { wx: 584, boundingbox: [39, 0, 545, 505] }, ',' => { wx: 278, boundingbox: [87, -147, 191, 106] }, '-' => { wx: 333, boundingbox: [44, 232, 289, 322] }, '.' => { wx: 278, boundingbox: [87, 0, 191, 106] }, '/' => { wx: 278, boundingbox: [-17, -19, 295, 737] }, '0' => { wx: 556, boundingbox: [37, -19, 519, 703] }, '1' => { wx: 556, boundingbox: [101, 0, 359, 703] }, '2' => { wx: 556, boundingbox: [26, 0, 507, 703] }, '3' => { wx: 556, boundingbox: [34, -19, 522, 703] }, '4' => { wx: 556, boundingbox: [25, 0, 523, 703] }, '5' => { wx: 556, boundingbox: [32, -19, 514, 688] }, '6' => { wx: 556, boundingbox: [38, -19, 518, 703] }, '7' => { wx: 556, boundingbox: [37, 0, 523, 688] }, '8' => { wx: 556, boundingbox: [38, -19, 517, 703] }, '9' => { wx: 556, boundingbox: [42, -19, 514, 703] }, ':' => { wx: 278, boundingbox: [87, 0, 191, 516] }, ';' => { wx: 278, boundingbox: [87, -147, 191, 516] }, '<' => { wx: 584, boundingbox: [48, 11, 536, 495] }, '=' => { wx: 584, boundingbox: [39, 115, 545, 390] }, '>' => { wx: 584, boundingbox: [48, 11, 536, 495] }, '?' 
=> { wx: 556, boundingbox: [56, 0, 492, 727] }, '@' => { wx: 1015, boundingbox: [147, -19, 868, 737] }, 'A' => { wx: 667, boundingbox: [14, 0, 654, 718] }, 'B' => { wx: 667, boundingbox: [74, 0, 627, 718] }, 'C' => { wx: 722, boundingbox: [44, -19, 681, 737] }, 'D' => { wx: 722, boundingbox: [81, 0, 674, 718] }, 'E' => { wx: 667, boundingbox: [86, 0, 616, 718] }, 'F' => { wx: 611, boundingbox: [86, 0, 583, 718] }, 'G' => { wx: 778, boundingbox: [48, -19, 704, 737] }, 'H' => { wx: 722, boundingbox: [77, 0, 646, 718] }, 'I' => { wx: 278, boundingbox: [91, 0, 188, 718] }, 'J' => { wx: 500, boundingbox: [17, -19, 428, 718] }, 'K' => { wx: 667, boundingbox: [76, 0, 663, 718] }, 'L' => { wx: 556, boundingbox: [76, 0, 537, 718] }, 'M' => { wx: 833, boundingbox: [73, 0, 761, 718] }, 'N' => { wx: 722, boundingbox: [76, 0, 646, 718] }, 'O' => { wx: 778, boundingbox: [39, -19, 739, 737] }, 'P' => { wx: 667, boundingbox: [86, 0, 622, 718] }, 'Q' => { wx: 778, boundingbox: [39, -56, 739, 737] }, 'R' => { wx: 722, boundingbox: [88, 0, 684, 718] }, 'S' => { wx: 667, boundingbox: [49, -19, 620, 737] }, 'T' => { wx: 611, boundingbox: [14, 0, 597, 718] }, 'U' => { wx: 722, boundingbox: [79, -19, 644, 718] }, 'V' => { wx: 667, boundingbox: [20, 0, 647, 718] }, 'W' => { wx: 944, boundingbox: [16, 0, 928, 718] }, 'X' => { wx: 667, boundingbox: [19, 0, 648, 718] }, 'Y' => { wx: 667, boundingbox: [14, 0, 653, 718] }, 'Z' => { wx: 611, boundingbox: [23, 0, 588, 718] }, '[' => { wx: 278, boundingbox: [63, -196, 250, 722] }, '\\\\' => { wx: 278, boundingbox: [-17, -19, 295, 737] }, ']' => { wx: 278, boundingbox: [28, -196, 215, 722] }, '^' => { wx: 469, boundingbox: [-14, 264, 483, 688] }, '_' => { wx: 556, boundingbox: [0, -125, 556, -75] }, '`' => { wx: 222, boundingbox: [65, 470, 169, 725] }, 'a' => { wx: 556, boundingbox: [36, -15, 530, 538] }, 'b' => { wx: 556, boundingbox: [58, -15, 517, 718] }, 'c' => { wx: 500, boundingbox: [30, -15, 477, 538] }, 'd' => { wx: 556, boundingbox: [35, -15, 499, 718] }, 'e' => { wx: 556, boundingbox: [40, -15, 516, 538] }, 'f' => { wx: 278, boundingbox: [14, 0, 262, 728] }, 'g' => { wx: 556, boundingbox: [40, -220, 499, 538] }, 'h' => { wx: 556, boundingbox: [65, 0, 491, 718] }, 'i' => { wx: 222, boundingbox: [67, 0, 155, 718] }, 'j' => { wx: 222, boundingbox: [-16, -210, 155, 718] }, 'k' => { wx: 500, boundingbox: [67, 0, 501, 718] }, 'l' => { wx: 222, boundingbox: [67, 0, 155, 718] }, 'm' => { wx: 833, boundingbox: [65, 0, 769, 538] }, 'n' => { wx: 556, boundingbox: [65, 0, 491, 538] }, 'o' => { wx: 556, boundingbox: [35, -14, 521, 538] }, 'p' => { wx: 556, boundingbox: [58, -207, 517, 538] }, 'q' => { wx: 556, boundingbox: [35, -207, 494, 538] }, 'r' => { wx: 333, boundingbox: [77, 0, 332, 538] }, 's' => { wx: 500, boundingbox: [32, -15, 464, 538] }, 't' => { wx: 278, boundingbox: [14, -7, 257, 669] }, 'u' => { wx: 556, boundingbox: [68, -15, 489, 523] }, 'v' => { wx: 500, boundingbox: [8, 0, 492, 523] }, 'w' => { wx: 722, boundingbox: [14, 0, 709, 523] }, 'x' => { wx: 500, boundingbox: [11, 0, 490, 523] }, 'y' => { wx: 500, boundingbox: [11, -214, 489, 523] }, 'z' => { wx: 500, boundingbox: [31, 0, 469, 523] }, '{' => { wx: 334, boundingbox: [42, -196, 292, 722] }, '|' => { wx: 260, boundingbox: [94, -225, 167, 775] }, '}' => { wx: 334, boundingbox: [42, -196, 292, 722] }, '~' => { wx: 584, boundingbox: [61, 180, 523, 326] }, \"\\u00A1\" => { wx: 333, boundingbox: [118, -195, 215, 523] }, \"\\u00A2\" => { wx: 556, boundingbox: [51, -115, 513, 623] }, \"\\u00A3\" => { wx: 
556, boundingbox: [33, -16, 539, 718] }, \"\\u00A4\" => { wx: 167, boundingbox: [-166, -19, 333, 703] }, \"\\u00A5\" => { wx: 556, boundingbox: [3, 0, 553, 688] }, \"\\u00A6\" => { wx: 556, boundingbox: [-11, -207, 501, 737] }, \"\\u00A7\" => { wx: 556, boundingbox: [43, -191, 512, 737] }, \"\\u00A8\" => { wx: 556, boundingbox: [28, 99, 528, 603] }, \"\\u00A9\" => { wx: 191, boundingbox: [59, 463, 132, 718] }, \"\\u00AA\" => { wx: 333, boundingbox: [38, 470, 307, 725] }, \"\\u00AB\" => { wx: 556, boundingbox: [97, 108, 459, 446] }, \"\\u00AC\" => { wx: 333, boundingbox: [88, 108, 245, 446] }, \"\\u00AD\" => { wx: 333, boundingbox: [88, 108, 245, 446] }, \"\\u00AE\" => { wx: 500, boundingbox: [14, 0, 434, 728] }, \"\\u00AF\" => { wx: 500, boundingbox: [14, 0, 432, 728] }, \"\\u00B1\" => { wx: 556, boundingbox: [0, 240, 556, 313] }, \"\\u00B2\" => { wx: 556, boundingbox: [43, -159, 514, 718] }, \"\\u00B3\" => { wx: 556, boundingbox: [43, -159, 514, 718] }, \"\\u00B4\" => { wx: 278, boundingbox: [77, 190, 202, 315] }, \"\\u00B6\" => { wx: 537, boundingbox: [18, -173, 497, 718] }, \"\\u00B7\" => { wx: 350, boundingbox: [18, 202, 333, 517] }, \"\\u00B8\" => { wx: 222, boundingbox: [53, -149, 157, 106] }, \"\\u00B9\" => { wx: 333, boundingbox: [26, -149, 295, 106] }, \"\\u00BA\" => { wx: 333, boundingbox: [26, 463, 295, 718] }, \"\\u00BB\" => { wx: 556, boundingbox: [97, 108, 459, 446] }, \"\\u00BC\" => { wx: 1000, boundingbox: [115, 0, 885, 106] }, \"\\u00BD\" => { wx: 1000, boundingbox: [7, -19, 994, 703] }, \"\\u00BF\" => { wx: 611, boundingbox: [91, -201, 527, 525] }, \"\\u00C1\" => { wx: 333, boundingbox: [14, 593, 211, 734] }, \"\\u00C2\" => { wx: 333, boundingbox: [122, 593, 319, 734] }, \"\\u00C3\" => { wx: 333, boundingbox: [21, 593, 312, 734] }, \"\\u00C4\" => { wx: 333, boundingbox: [-4, 606, 337, 722] }, \"\\u00C5\" => { wx: 333, boundingbox: [10, 627, 323, 684] }, \"\\u00C6\" => { wx: 333, boundingbox: [13, 595, 321, 731] }, \"\\u00C7\" => { wx: 333, boundingbox: [121, 604, 212, 706] }, \"\\u00C8\" => { wx: 333, boundingbox: [40, 604, 293, 706] }, \"\\u00CA\" => { wx: 333, boundingbox: [75, 572, 259, 756] }, \"\\u00CB\" => { wx: 333, boundingbox: [45, -225, 259, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [31, 593, 409, 734] }, \"\\u00CE\" => { wx: 333, boundingbox: [73, -225, 287, 0] }, \"\\u00CF\" => { wx: 333, boundingbox: [21, 593, 312, 734] }, \"\\u00D0\" => { wx: 1000, boundingbox: [0, 240, 1000, 313] }, \"\\u00E1\" => { wx: 1000, boundingbox: [8, 0, 951, 718] }, \"\\u00E3\" => { wx: 370, boundingbox: [24, 405, 346, 737] }, \"\\u00E8\" => { wx: 556, boundingbox: [-20, 0, 537, 718] }, \"\\u00E9\" => { wx: 778, boundingbox: [39, -19, 740, 737] }, \"\\u00EA\" => { wx: 1000, boundingbox: [36, -19, 965, 737] }, \"\\u00EB\" => { wx: 365, boundingbox: [25, 405, 341, 737] }, \"\\u00F1\" => { wx: 889, boundingbox: [36, -15, 847, 538] }, \"\\u00F5\" => { wx: 278, boundingbox: [95, 0, 183, 523] }, \"\\u00F8\" => { wx: 222, boundingbox: [-20, 0, 242, 718] }, \"\\u00F9\" => { wx: 611, boundingbox: [28, -22, 537, 545] }, \"\\u00FA\" => { wx: 944, boundingbox: [35, -15, 902, 538] }, \"\\u00FB\" => { wx: 611, boundingbox: [67, -15, 571, 728] }, \"\\xFF\" => { wx: 556, boundingbox: [0, 0, 0, 0] } }\n helvetica_bold_metrics = { ' ' => { wx: 278, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 333, boundingbox: [90, 0, 244, 718] }, '\"' => { wx: 474, boundingbox: [98, 447, 376, 718] }, '#' => { wx: 556, boundingbox: [18, 0, 538, 698] }, '$' => { wx: 556, boundingbox: [30, -115, 523, 775] }, '%' => { wx: 889, boundingbox: [28, -19, 861, 710] }, '&' => { wx: 722, boundingbox: [54, -19, 701, 718] }, \"'\" => { wx: 278, boundingbox: [69, 445, 209, 718] }, '(' => { wx: 333, boundingbox: [35, -208, 314, 734] }, ')' => { wx: 333, boundingbox: [19, -208, 298, 734] }, '*' => { wx: 389, boundingbox: [27, 387, 362, 718] }, '+' => { wx: 584, boundingbox: [40, 0, 544, 506] }, ',' => { wx: 278, boundingbox: [64, -168, 214, 146] }, '-' => { wx: 333, boundingbox: [27, 215, 306, 345] }, '.' => { wx: 278, boundingbox: [64, 0, 214, 146] }, '/' => { wx: 278, boundingbox: [-33, -19, 311, 737] }, '0' => { wx: 556, boundingbox: [32, -19, 524, 710] }, '1' => { wx: 556, boundingbox: [69, 0, 378, 710] }, '2' => { wx: 556, boundingbox: [26, 0, 511, 710] }, '3' => { wx: 556, boundingbox: [27, -19, 516, 710] }, '4' => { wx: 556, boundingbox: [27, 0, 526, 710] }, '5' => { wx: 556, boundingbox: [27, -19, 516, 698] }, '6' => { wx: 556, boundingbox: [31, -19, 520, 710] }, '7' => { wx: 556, boundingbox: [25, 0, 528, 698] }, '8' => { wx: 556, boundingbox: [32, -19, 524, 710] }, '9' => { wx: 556, boundingbox: [30, -19, 522, 710] }, ':' => { wx: 333, boundingbox: [92, 0, 242, 512] }, ';' => { wx: 333, boundingbox: [92, -168, 242, 512] }, '<' => { wx: 584, boundingbox: [38, -8, 546, 514] }, '=' => { wx: 584, boundingbox: [40, 87, 544, 419] }, '>' => { wx: 584, boundingbox: [38, -8, 546, 514] }, '?' => { wx: 611, boundingbox: [60, 0, 556, 727] }, '@' => { wx: 975, boundingbox: [118, -19, 856, 737] }, 'A' => { wx: 722, boundingbox: [20, 0, 702, 718] }, 'B' => { wx: 722, boundingbox: [76, 0, 669, 718] }, 'C' => { wx: 722, boundingbox: [44, -19, 684, 737] }, 'D' => { wx: 722, boundingbox: [76, 0, 685, 718] }, 'E' => { wx: 667, boundingbox: [76, 0, 621, 718] }, 'F' => { wx: 611, boundingbox: [76, 0, 587, 718] }, 'G' => { wx: 778, boundingbox: [44, -19, 713, 737] }, 'H' => { wx: 722, boundingbox: [71, 0, 651, 718] }, 'I' => { wx: 278, boundingbox: [64, 0, 214, 718] }, 'J' => { wx: 556, boundingbox: [22, -18, 484, 718] }, 'K' => { wx: 722, boundingbox: [87, 0, 722, 718] }, 'L' => { wx: 611, boundingbox: [76, 0, 583, 718] }, 'M' => { wx: 833, boundingbox: [69, 0, 765, 718] }, 'N' => { wx: 722, boundingbox: [69, 0, 654, 718] }, 'O' => { wx: 778, boundingbox: [44, -19, 734, 737] }, 'P' => { wx: 667, boundingbox: [76, 0, 627, 718] }, 'Q' => { wx: 778, boundingbox: [44, -52, 737, 737] }, 'R' => { wx: 722, boundingbox: [76, 0, 677, 718] }, 'S' => { wx: 667, boundingbox: [39, -19, 629, 737] }, 'T' => { wx: 611, boundingbox: [14, 0, 598, 718] }, 'U' => { wx: 722, boundingbox: [72, -19, 651, 718] }, 'V' => { wx: 667, boundingbox: [19, 0, 648, 718] }, 'W' => { wx: 944, boundingbox: [16, 0, 929, 718] }, 'X' => { wx: 667, boundingbox: [14, 0, 653, 718] }, 'Y' => { wx: 667, boundingbox: [15, 0, 653, 718] }, 'Z' => { wx: 611, boundingbox: [25, 0, 586, 718] }, '[' => { wx: 333, boundingbox: [63, -196, 309, 722] }, '\\\\' => { wx: 278, boundingbox: [-33, -19, 311, 737] }, ']' => { wx: 333, boundingbox: [24, -196, 270, 722] }, '^' => { wx: 584, boundingbox: [62, 323, 522, 698] }, '_' => { wx: 556, boundingbox: [0, -125, 556, -75] }, '`' => { wx: 278, boundingbox: [69, 454, 209, 727] }, 'a' => { wx: 556, boundingbox: [29, -14, 527, 546] }, 'b' => { wx: 611, boundingbox: [61, -14, 578, 718] }, 'c' => { wx: 556, boundingbox: [34, 
-14, 524, 546] }, 'd' => { wx: 611, boundingbox: [34, -14, 551, 718] }, 'e' => { wx: 556, boundingbox: [23, -14, 528, 546] }, 'f' => { wx: 333, boundingbox: [10, 0, 318, 727] }, 'g' => { wx: 611, boundingbox: [40, -217, 553, 546] }, 'h' => { wx: 611, boundingbox: [65, 0, 546, 718] }, 'i' => { wx: 278, boundingbox: [69, 0, 209, 725] }, 'j' => { wx: 278, boundingbox: [3, -214, 209, 725] }, 'k' => { wx: 556, boundingbox: [69, 0, 562, 718] }, 'l' => { wx: 278, boundingbox: [69, 0, 209, 718] }, 'm' => { wx: 889, boundingbox: [64, 0, 826, 546] }, 'n' => { wx: 611, boundingbox: [65, 0, 546, 546] }, 'o' => { wx: 611, boundingbox: [34, -14, 578, 546] }, 'p' => { wx: 611, boundingbox: [62, -207, 578, 546] }, 'q' => { wx: 611, boundingbox: [34, -207, 552, 546] }, 'r' => { wx: 389, boundingbox: [64, 0, 373, 546] }, 's' => { wx: 556, boundingbox: [30, -14, 519, 546] }, 't' => { wx: 333, boundingbox: [10, -6, 309, 676] }, 'u' => { wx: 611, boundingbox: [66, -14, 545, 532] }, 'v' => { wx: 556, boundingbox: [13, 0, 543, 532] }, 'w' => { wx: 778, boundingbox: [10, 0, 769, 532] }, 'x' => { wx: 556, boundingbox: [15, 0, 541, 532] }, 'y' => { wx: 556, boundingbox: [10, -214, 539, 532] }, 'z' => { wx: 500, boundingbox: [20, 0, 480, 532] }, '{' => { wx: 389, boundingbox: [48, -196, 365, 722] }, '|' => { wx: 280, boundingbox: [84, -225, 196, 775] }, '}' => { wx: 389, boundingbox: [24, -196, 341, 722] }, '~' => { wx: 584, boundingbox: [61, 163, 523, 343] }, \"\\u00A1\" => { wx: 333, boundingbox: [90, -186, 244, 532] }, \"\\u00A2\" => { wx: 556, boundingbox: [34, -118, 524, 628] }, \"\\u00A3\" => { wx: 556, boundingbox: [28, -16, 541, 718] }, \"\\u00A4\" => { wx: 167, boundingbox: [-170, -19, 336, 710] }, \"\\u00A5\" => { wx: 556, boundingbox: [-9, 0, 565, 698] }, \"\\u00A6\" => { wx: 556, boundingbox: [-10, -210, 516, 737] }, \"\\u00A7\" => { wx: 556, boundingbox: [34, -184, 522, 727] }, \"\\u00A8\" => { wx: 556, boundingbox: [-3, 76, 559, 636] }, \"\\u00A9\" => { wx: 238, boundingbox: [70, 447, 168, 718] }, \"\\u00AA\" => { wx: 500, boundingbox: [64, 454, 436, 727] }, \"\\u00AB\" => { wx: 556, boundingbox: [88, 76, 468, 484] }, \"\\u00AC\" => { wx: 333, boundingbox: [83, 76, 250, 484] }, \"\\u00AD\" => { wx: 333, boundingbox: [83, 76, 250, 484] }, \"\\u00AE\" => { wx: 611, boundingbox: [10, 0, 542, 727] }, \"\\u00AF\" => { wx: 611, boundingbox: [10, 0, 542, 727] }, \"\\u00B1\" => { wx: 556, boundingbox: [0, 227, 556, 333] }, \"\\u00B2\" => { wx: 556, boundingbox: [36, -171, 520, 718] }, \"\\u00B3\" => { wx: 556, boundingbox: [36, -171, 520, 718] }, \"\\u00B4\" => { wx: 278, boundingbox: [58, 172, 220, 334] }, \"\\u00B6\" => { wx: 556, boundingbox: [-8, -191, 539, 700] }, \"\\u00B7\" => { wx: 350, boundingbox: [10, 194, 340, 524] }, \"\\u00B8\" => { wx: 278, boundingbox: [69, -146, 209, 127] }, \"\\u00B9\" => { wx: 500, boundingbox: [64, -146, 436, 127] }, \"\\u00BA\" => { wx: 500, boundingbox: [64, 445, 436, 718] }, \"\\u00BB\" => { wx: 556, boundingbox: [88, 76, 468, 484] }, \"\\u00BC\" => { wx: 1000, boundingbox: [92, 0, 908, 146] }, \"\\u00BD\" => { wx: 1000, boundingbox: [-3, -19, 1003, 710] }, \"\\u00BF\" => { wx: 611, boundingbox: [55, -195, 551, 532] }, \"\\u00C1\" => { wx: 333, boundingbox: [-23, 604, 225, 750] }, \"\\u00C2\" => { wx: 333, boundingbox: [108, 604, 356, 750] }, \"\\u00C3\" => { wx: 333, boundingbox: [-10, 604, 343, 750] }, \"\\u00C4\" => { wx: 333, boundingbox: [-17, 610, 350, 737] }, \"\\u00C5\" => { wx: 333, boundingbox: [-6, 604, 339, 678] }, \"\\u00C6\" => { wx: 333, boundingbox: [-2, 
604, 335, 750] }, \"\\u00C7\" => { wx: 333, boundingbox: [104, 614, 230, 729] }, \"\\u00C8\" => { wx: 333, boundingbox: [6, 614, 327, 729] }, \"\\u00CA\" => { wx: 333, boundingbox: [59, 568, 275, 776] }, \"\\u00CB\" => { wx: 333, boundingbox: [6, -228, 245, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [9, 604, 486, 750] }, \"\\u00CE\" => { wx: 333, boundingbox: [71, -228, 304, 0] }, \"\\u00CF\" => { wx: 333, boundingbox: [-10, 604, 343, 750] }, \"\\u00D0\" => { wx: 1000, boundingbox: [0, 227, 1000, 333] }, \"\\u00E1\" => { wx: 1000, boundingbox: [5, 0, 954, 718] }, \"\\u00E3\" => { wx: 370, boundingbox: [22, 401, 347, 737] }, \"\\u00E8\" => { wx: 611, boundingbox: [-20, 0, 583, 718] }, \"\\u00E9\" => { wx: 778, boundingbox: [33, -27, 744, 745] }, \"\\u00EA\" => { wx: 1000, boundingbox: [37, -19, 961, 737] }, \"\\u00EB\" => { wx: 365, boundingbox: [6, 401, 360, 737] }, \"\\u00F1\" => { wx: 889, boundingbox: [29, -14, 858, 546] }, \"\\u00F5\" => { wx: 278, boundingbox: [69, 0, 209, 532] }, \"\\u00F8\" => { wx: 278, boundingbox: [-18, 0, 296, 718] }, \"\\u00F9\" => { wx: 611, boundingbox: [22, -29, 589, 560] }, \"\\u00FA\" => { wx: 944, boundingbox: [34, -14, 912, 546] }, \"\\u00FB\" => { wx: 611, boundingbox: [69, -14, 579, 731] }, \"\\xFF\" => { wx: 556, boundingbox: [0, 0, 0, 0] } }\n helvetica_oblique_metrics = { ' ' => { wx: 278, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 278, boundingbox: [90, 0, 340, 718] }, '\"' => { wx: 355, boundingbox: [168, 463, 438, 718] }, '#' => { wx: 556, boundingbox: [73, 0, 631, 688] }, '$' => { wx: 556, boundingbox: [69, -115, 617, 775] }, '%' => { wx: 889, boundingbox: [147, -19, 889, 703] }, '&' => { wx: 667, boundingbox: [77, -15, 647, 718] }, \"'\" => { wx: 222, boundingbox: [151, 463, 310, 718] }, '(' => { wx: 333, boundingbox: [108, -207, 454, 733] }, ')' => { wx: 333, boundingbox: [-9, -207, 337, 733] }, '*' => { wx: 389, boundingbox: [165, 431, 475, 718] }, '+' => { wx: 584, boundingbox: [85, 0, 606, 505] }, ',' => { wx: 278, boundingbox: [56, -147, 214, 106] }, '-' => { wx: 333, boundingbox: [93, 232, 357, 322] }, '.' => { wx: 278, boundingbox: [87, 0, 214, 106] }, '/' => { wx: 278, boundingbox: [-21, -19, 452, 737] }, '0' => { wx: 556, boundingbox: [93, -19, 608, 703] }, '1' => { wx: 556, boundingbox: [207, 0, 508, 703] }, '2' => { wx: 556, boundingbox: [26, 0, 617, 703] }, '3' => { wx: 556, boundingbox: [75, -19, 610, 703] }, '4' => { wx: 556, boundingbox: [61, 0, 576, 703] }, '5' => { wx: 556, boundingbox: [68, -19, 621, 688] }, '6' => { wx: 556, boundingbox: [91, -19, 615, 703] }, '7' => { wx: 556, boundingbox: [137, 0, 669, 688] }, '8' => { wx: 556, boundingbox: [74, -19, 607, 703] }, '9' => { wx: 556, boundingbox: [82, -19, 609, 703] }, ':' => { wx: 278, boundingbox: [87, 0, 301, 516] }, ';' => { wx: 278, boundingbox: [56, -147, 301, 516] }, '<' => { wx: 584, boundingbox: [94, 11, 641, 495] }, '=' => { wx: 584, boundingbox: [63, 115, 628, 390] }, '>' => { wx: 584, boundingbox: [50, 11, 597, 495] }, '?' 
=> { wx: 556, boundingbox: [161, 0, 610, 727] }, '@' => { wx: 1015, boundingbox: [215, -19, 965, 737] }, 'A' => { wx: 667, boundingbox: [14, 0, 654, 718] }, 'B' => { wx: 667, boundingbox: [74, 0, 712, 718] }, 'C' => { wx: 722, boundingbox: [108, -19, 782, 737] }, 'D' => { wx: 722, boundingbox: [81, 0, 764, 718] }, 'E' => { wx: 667, boundingbox: [86, 0, 762, 718] }, 'F' => { wx: 611, boundingbox: [86, 0, 736, 718] }, 'G' => { wx: 778, boundingbox: [111, -19, 799, 737] }, 'H' => { wx: 722, boundingbox: [77, 0, 799, 718] }, 'I' => { wx: 278, boundingbox: [91, 0, 341, 718] }, 'J' => { wx: 500, boundingbox: [47, -19, 581, 718] }, 'K' => { wx: 667, boundingbox: [76, 0, 808, 718] }, 'L' => { wx: 556, boundingbox: [76, 0, 555, 718] }, 'M' => { wx: 833, boundingbox: [73, 0, 914, 718] }, 'N' => { wx: 722, boundingbox: [76, 0, 799, 718] }, 'O' => { wx: 778, boundingbox: [105, -19, 826, 737] }, 'P' => { wx: 667, boundingbox: [86, 0, 737, 718] }, 'Q' => { wx: 778, boundingbox: [105, -56, 826, 737] }, 'R' => { wx: 722, boundingbox: [88, 0, 773, 718] }, 'S' => { wx: 667, boundingbox: [90, -19, 713, 737] }, 'T' => { wx: 611, boundingbox: [148, 0, 750, 718] }, 'U' => { wx: 722, boundingbox: [123, -19, 797, 718] }, 'V' => { wx: 667, boundingbox: [173, 0, 800, 718] }, 'W' => { wx: 944, boundingbox: [169, 0, 1081, 718] }, 'X' => { wx: 667, boundingbox: [19, 0, 790, 718] }, 'Y' => { wx: 667, boundingbox: [167, 0, 806, 718] }, 'Z' => { wx: 611, boundingbox: [23, 0, 741, 718] }, '[' => { wx: 278, boundingbox: [21, -196, 403, 722] }, '\\\\' => { wx: 278, boundingbox: [140, -19, 291, 737] }, ']' => { wx: 278, boundingbox: [-14, -196, 368, 722] }, '^' => { wx: 469, boundingbox: [42, 264, 539, 688] }, '_' => { wx: 556, boundingbox: [-27, -125, 540, -75] }, '`' => { wx: 222, boundingbox: [165, 470, 323, 725] }, 'a' => { wx: 556, boundingbox: [61, -15, 559, 538] }, 'b' => { wx: 556, boundingbox: [58, -15, 584, 718] }, 'c' => { wx: 500, boundingbox: [74, -15, 553, 538] }, 'd' => { wx: 556, boundingbox: [84, -15, 652, 718] }, 'e' => { wx: 556, boundingbox: [84, -15, 578, 538] }, 'f' => { wx: 278, boundingbox: [86, 0, 416, 728] }, 'g' => { wx: 556, boundingbox: [42, -220, 610, 538] }, 'h' => { wx: 556, boundingbox: [65, 0, 573, 718] }, 'i' => { wx: 222, boundingbox: [67, 0, 308, 718] }, 'j' => { wx: 222, boundingbox: [-60, -210, 308, 718] }, 'k' => { wx: 500, boundingbox: [67, 0, 600, 718] }, 'l' => { wx: 222, boundingbox: [67, 0, 308, 718] }, 'm' => { wx: 833, boundingbox: [65, 0, 852, 538] }, 'n' => { wx: 556, boundingbox: [65, 0, 573, 538] }, 'o' => { wx: 556, boundingbox: [83, -14, 585, 538] }, 'p' => { wx: 556, boundingbox: [14, -207, 584, 538] }, 'q' => { wx: 556, boundingbox: [84, -207, 605, 538] }, 'r' => { wx: 333, boundingbox: [77, 0, 446, 538] }, 's' => { wx: 500, boundingbox: [63, -15, 529, 538] }, 't' => { wx: 278, boundingbox: [102, -7, 368, 669] }, 'u' => { wx: 556, boundingbox: [94, -15, 600, 523] }, 'v' => { wx: 500, boundingbox: [119, 0, 603, 523] }, 'w' => { wx: 722, boundingbox: [125, 0, 820, 523] }, 'x' => { wx: 500, boundingbox: [11, 0, 594, 523] }, 'y' => { wx: 500, boundingbox: [15, -214, 600, 523] }, 'z' => { wx: 500, boundingbox: [31, 0, 571, 523] }, '{' => { wx: 334, boundingbox: [92, -196, 445, 722] }, '|' => { wx: 260, boundingbox: [46, -225, 332, 775] }, '}' => { wx: 334, boundingbox: [0, -196, 354, 722] }, '~' => { wx: 584, boundingbox: [111, 180, 580, 326] }, \"\\u00A1\" => { wx: 333, boundingbox: [77, -195, 326, 523] }, \"\\u00A2\" => { wx: 556, boundingbox: [95, -115, 584, 623] }, 
\"\\u00A3\" => { wx: 556, boundingbox: [49, -16, 634, 718] }, \"\\u00A4\" => { wx: 167, boundingbox: [-170, -19, 482, 703] }, \"\\u00A5\" => { wx: 556, boundingbox: [81, 0, 699, 688] }, \"\\u00A6\" => { wx: 556, boundingbox: [-52, -207, 654, 737] }, \"\\u00A7\" => { wx: 556, boundingbox: [76, -191, 584, 737] }, \"\\u00A8\" => { wx: 556, boundingbox: [60, 99, 646, 603] }, \"\\u00A9\" => { wx: 191, boundingbox: [157, 463, 285, 718] }, \"\\u00AA\" => { wx: 333, boundingbox: [138, 470, 461, 725] }, \"\\u00AB\" => { wx: 556, boundingbox: [146, 108, 554, 446] }, \"\\u00AC\" => { wx: 333, boundingbox: [137, 108, 340, 446] }, \"\\u00AD\" => { wx: 333, boundingbox: [111, 108, 314, 446] }, \"\\u00AE\" => { wx: 500, boundingbox: [86, 0, 587, 728] }, \"\\u00AF\" => { wx: 500, boundingbox: [86, 0, 585, 728] }, \"\\u00B1\" => { wx: 556, boundingbox: [51, 240, 623, 313] }, \"\\u00B2\" => { wx: 556, boundingbox: [135, -159, 622, 718] }, \"\\u00B3\" => { wx: 556, boundingbox: [52, -159, 623, 718] }, \"\\u00B4\" => { wx: 278, boundingbox: [129, 190, 257, 315] }, \"\\u00B6\" => { wx: 537, boundingbox: [126, -173, 650, 718] }, \"\\u00B7\" => { wx: 350, boundingbox: [91, 202, 413, 517] }, \"\\u00B8\" => { wx: 222, boundingbox: [21, -149, 180, 106] }, \"\\u00B9\" => { wx: 333, boundingbox: [-6, -149, 318, 106] }, \"\\u00BA\" => { wx: 333, boundingbox: [124, 463, 448, 718] }, \"\\u00BB\" => { wx: 556, boundingbox: [120, 108, 528, 446] }, \"\\u00BC\" => { wx: 1000, boundingbox: [115, 0, 908, 106] }, \"\\u00BD\" => { wx: 1000, boundingbox: [88, -19, 1029, 703] }, \"\\u00BF\" => { wx: 611, boundingbox: [85, -201, 534, 525] }, \"\\u00C1\" => { wx: 333, boundingbox: [170, 593, 337, 734] }, \"\\u00C2\" => { wx: 333, boundingbox: [248, 593, 475, 734] }, \"\\u00C3\" => { wx: 333, boundingbox: [147, 593, 438, 734] }, \"\\u00C4\" => { wx: 333, boundingbox: [125, 606, 490, 722] }, \"\\u00C5\" => { wx: 333, boundingbox: [143, 627, 468, 684] }, \"\\u00C6\" => { wx: 333, boundingbox: [167, 595, 476, 731] }, \"\\u00C7\" => { wx: 333, boundingbox: [249, 604, 362, 706] }, \"\\u00C8\" => { wx: 333, boundingbox: [168, 604, 443, 706] }, \"\\u00CA\" => { wx: 333, boundingbox: [214, 572, 402, 756] }, \"\\u00CB\" => { wx: 333, boundingbox: [2, -225, 232, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [157, 593, 565, 734] }, \"\\u00CE\" => { wx: 333, boundingbox: [43, -225, 249, 0] }, \"\\u00CF\" => { wx: 333, boundingbox: [177, 593, 468, 734] }, \"\\u00D0\" => { wx: 1000, boundingbox: [51, 240, 1067, 313] }, \"\\u00E1\" => { wx: 1000, boundingbox: [8, 0, 1097, 718] }, \"\\u00E3\" => { wx: 370, boundingbox: [127, 405, 449, 737] }, \"\\u00E8\" => { wx: 556, boundingbox: [41, 0, 555, 718] }, \"\\u00E9\" => { wx: 778, boundingbox: [43, -19, 890, 737] }, \"\\u00EA\" => { wx: 1000, boundingbox: [98, -19, 1116, 737] }, \"\\u00EB\" => { wx: 365, boundingbox: [141, 405, 468, 737] }, \"\\u00F1\" => { wx: 889, boundingbox: [61, -15, 909, 538] }, \"\\u00F5\" => { wx: 278, boundingbox: [95, 0, 294, 523] }, \"\\u00F8\" => { wx: 222, boundingbox: [41, 0, 347, 718] }, \"\\u00F9\" => { wx: 611, boundingbox: [29, -22, 647, 545] }, \"\\u00FA\" => { wx: 944, boundingbox: [83, -15, 964, 538] }, \"\\u00FB\" => { wx: 611, boundingbox: [67, -15, 658, 728] }, \"\\xFF\" => { wx: 556, boundingbox: [0, 0, 0, 0] } }\n helvetica_oblique_metrics = { ' ' => { wx: 278, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 278, boundingbox: [90, 0, 340, 718] }, '\"' => { wx: 355, boundingbox: [168, 463, 438, 718] }, '#' => { wx: 556, boundingbox: [73, 0, 631, 688] }, '$' => { wx: 556, boundingbox: [69, -115, 617, 775] }, '%' => { wx: 889, boundingbox: [147, -19, 889, 703] }, '&' => { wx: 667, boundingbox: [77, -15, 647, 718] }, \"'\" => { wx: 222, boundingbox: [151, 463, 310, 718] }, '(' => { wx: 333, boundingbox: [108, -207, 454, 733] }, ')' => { wx: 333, boundingbox: [-9, -207, 337, 733] }, '*' => { wx: 389, boundingbox: [165, 431, 475, 718] }, '+' => { wx: 584, boundingbox: [85, 0, 606, 505] }, ',' => { wx: 278, boundingbox: [56, -147, 214, 106] }, '-' => { wx: 333, boundingbox: [93, 232, 357, 322] }, '.' => { wx: 278, boundingbox: [87, 0, 214, 106] }, '/' => { wx: 278, boundingbox: [-21, -19, 452, 737] }, '0' => { wx: 556, boundingbox: [93, -19, 608, 703] }, '1' => { wx: 556, boundingbox: [207, 0, 508, 703] }, '2' => { wx: 556, boundingbox: [26, 0, 617, 703] }, '3' => { wx: 556, boundingbox: [75, -19, 610, 703] }, '4' => { wx: 556, boundingbox: [61, 0, 576, 703] }, '5' => { wx: 556, boundingbox: [68, -19, 621, 688] }, '6' => { wx: 556, boundingbox: [91, -19, 615, 703] }, '7' => { wx: 556, boundingbox: [137, 0, 669, 688] }, '8' => { wx: 556, boundingbox: [74, -19, 607, 703] }, '9' => { wx: 556, boundingbox: [82, -19, 609, 703] }, ':' => { wx: 278, boundingbox: [87, 0, 301, 516] }, ';' => { wx: 278, boundingbox: [56, -147, 301, 516] }, '<' => { wx: 584, boundingbox: [94, 11, 641, 495] }, '=' => { wx: 584, boundingbox: [63, 115, 628, 390] }, '>' => { wx: 584, boundingbox: [50, 11, 597, 495] }, '?' => { wx: 556, boundingbox: [161, 0, 610, 727] }, '@' => { wx: 1015, boundingbox: [215, -19, 965, 737] }, 'A' => { wx: 667, boundingbox: [14, 0, 654, 718] }, 'B' => { wx: 667, boundingbox: [74, 0, 712, 718] }, 'C' => { wx: 722, boundingbox: [108, -19, 782, 737] }, 'D' => { wx: 722, boundingbox: [81, 0, 764, 718] }, 'E' => { wx: 667, boundingbox: [86, 0, 762, 718] }, 'F' => { wx: 611, boundingbox: [86, 0, 736, 718] }, 'G' => { wx: 778, boundingbox: [111, -19, 799, 737] }, 'H' => { wx: 722, boundingbox: [77, 0, 799, 718] }, 'I' => { wx: 278, boundingbox: [91, 0, 341, 718] }, 'J' => { wx: 500, boundingbox: [47, -19, 581, 718] }, 'K' => { wx: 667, boundingbox: [76, 0, 808, 718] }, 'L' => { wx: 556, boundingbox: [76, 0, 555, 718] }, 'M' => { wx: 833, boundingbox: [73, 0, 914, 718] }, 'N' => { wx: 722, boundingbox: [76, 0, 799, 718] }, 'O' => { wx: 778, boundingbox: [105, -19, 826, 737] }, 'P' => { wx: 667, boundingbox: [86, 0, 737, 718] }, 'Q' => { wx: 778, boundingbox: [105, -56, 826, 737] }, 'R' => { wx: 722, boundingbox: [88, 0, 773, 718] }, 'S' => { wx: 667, boundingbox: [90, -19, 713, 737] }, 'T' => { wx: 611, boundingbox: [148, 0, 750, 718] }, 'U' => { wx: 722, boundingbox: [123, -19, 797, 718] }, 'V' => { wx: 667, boundingbox: [173, 0, 800, 718] }, 'W' => { wx: 944, boundingbox: [169, 0, 1081, 718] }, 'X' => { wx: 667, boundingbox: [19, 0, 790, 718] }, 'Y' => { wx: 667, boundingbox: [167, 0, 806, 718] }, 'Z' => { wx: 611, boundingbox: [23, 0, 741, 718] }, '[' => { wx: 278, boundingbox: [21, -196, 403, 722] }, '\\\\' => { wx: 278, boundingbox: [140, -19, 291, 737] }, ']' => { wx: 278, boundingbox: [-14, -196, 368, 722] }, '^' => { wx: 469, boundingbox: [42, 264, 539, 688] }, '_' => { wx: 556, boundingbox: [-27, -125, 540, -75] }, '`' => { wx: 222, boundingbox: [165, 470, 323, 725] }, 'a' => { wx: 556, boundingbox: [61, -15, 559, 538] }, 'b' => { wx: 556, boundingbox: [58, -15, 584, 718] }, 'c' => { wx: 
500, boundingbox: [74, -15, 553, 538] }, 'd' => { wx: 556, boundingbox: [84, -15, 652, 718] }, 'e' => { wx: 556, boundingbox: [84, -15, 578, 538] }, 'f' => { wx: 278, boundingbox: [86, 0, 416, 728] }, 'g' => { wx: 556, boundingbox: [42, -220, 610, 538] }, 'h' => { wx: 556, boundingbox: [65, 0, 573, 718] }, 'i' => { wx: 222, boundingbox: [67, 0, 308, 718] }, 'j' => { wx: 222, boundingbox: [-60, -210, 308, 718] }, 'k' => { wx: 500, boundingbox: [67, 0, 600, 718] }, 'l' => { wx: 222, boundingbox: [67, 0, 308, 718] }, 'm' => { wx: 833, boundingbox: [65, 0, 852, 538] }, 'n' => { wx: 556, boundingbox: [65, 0, 573, 538] }, 'o' => { wx: 556, boundingbox: [83, -14, 585, 538] }, 'p' => { wx: 556, boundingbox: [14, -207, 584, 538] }, 'q' => { wx: 556, boundingbox: [84, -207, 605, 538] }, 'r' => { wx: 333, boundingbox: [77, 0, 446, 538] }, 's' => { wx: 500, boundingbox: [63, -15, 529, 538] }, 't' => { wx: 278, boundingbox: [102, -7, 368, 669] }, 'u' => { wx: 556, boundingbox: [94, -15, 600, 523] }, 'v' => { wx: 500, boundingbox: [119, 0, 603, 523] }, 'w' => { wx: 722, boundingbox: [125, 0, 820, 523] }, 'x' => { wx: 500, boundingbox: [11, 0, 594, 523] }, 'y' => { wx: 500, boundingbox: [15, -214, 600, 523] }, 'z' => { wx: 500, boundingbox: [31, 0, 571, 523] }, '{' => { wx: 334, boundingbox: [92, -196, 445, 722] }, '|' => { wx: 260, boundingbox: [46, -225, 332, 775] }, '}' => { wx: 334, boundingbox: [0, -196, 354, 722] }, '~' => { wx: 584, boundingbox: [111, 180, 580, 326] }, \"\\u00A1\" => { wx: 333, boundingbox: [77, -195, 326, 523] }, \"\\u00A2\" => { wx: 556, boundingbox: [95, -115, 584, 623] }, \"\\u00A3\" => { wx: 556, boundingbox: [49, -16, 634, 718] }, \"\\u00A4\" => { wx: 167, boundingbox: [-170, -19, 482, 703] }, \"\\u00A5\" => { wx: 556, boundingbox: [81, 0, 699, 688] }, \"\\u00A6\" => { wx: 556, boundingbox: [-52, -207, 654, 737] }, \"\\u00A7\" => { wx: 556, boundingbox: [76, -191, 584, 737] }, \"\\u00A8\" => { wx: 556, boundingbox: [60, 99, 646, 603] }, \"\\u00A9\" => { wx: 191, boundingbox: [157, 463, 285, 718] }, \"\\u00AA\" => { wx: 333, boundingbox: [138, 470, 461, 725] }, \"\\u00AB\" => { wx: 556, boundingbox: [146, 108, 554, 446] }, \"\\u00AC\" => { wx: 333, boundingbox: [137, 108, 340, 446] }, \"\\u00AD\" => { wx: 333, boundingbox: [111, 108, 314, 446] }, \"\\u00AE\" => { wx: 500, boundingbox: [86, 0, 587, 728] }, \"\\u00AF\" => { wx: 500, boundingbox: [86, 0, 585, 728] }, \"\\u00B1\" => { wx: 556, boundingbox: [51, 240, 623, 313] }, \"\\u00B2\" => { wx: 556, boundingbox: [135, -159, 622, 718] }, \"\\u00B3\" => { wx: 556, boundingbox: [52, -159, 623, 718] }, \"\\u00B4\" => { wx: 278, boundingbox: [129, 190, 257, 315] }, \"\\u00B6\" => { wx: 537, boundingbox: [126, -173, 650, 718] }, \"\\u00B7\" => { wx: 350, boundingbox: [91, 202, 413, 517] }, \"\\u00B8\" => { wx: 222, boundingbox: [21, -149, 180, 106] }, \"\\u00B9\" => { wx: 333, boundingbox: [-6, -149, 318, 106] }, \"\\u00BA\" => { wx: 333, boundingbox: [124, 463, 448, 718] }, \"\\u00BB\" => { wx: 556, boundingbox: [120, 108, 528, 446] }, \"\\u00BC\" => { wx: 1000, boundingbox: [115, 0, 908, 106] }, \"\\u00BD\" => { wx: 1000, boundingbox: [88, -19, 1029, 703] }, \"\\u00BF\" => { wx: 611, boundingbox: [85, -201, 534, 525] }, \"\\u00C1\" => { wx: 333, boundingbox: [170, 593, 337, 734] }, \"\\u00C2\" => { wx: 333, boundingbox: [248, 593, 475, 734] }, \"\\u00C3\" => { wx: 333, boundingbox: [147, 593, 438, 734] }, \"\\u00C4\" => { wx: 333, boundingbox: [125, 606, 490, 722] }, \"\\u00C5\" => { wx: 333, boundingbox: [143, 627, 468, 684] }, 
\"\\u00C6\" => { wx: 333, boundingbox: [167, 595, 476, 731] }, \"\\u00C7\" => { wx: 333, boundingbox: [249, 604, 362, 706] }, \"\\u00C8\" => { wx: 333, boundingbox: [168, 604, 443, 706] }, \"\\u00CA\" => { wx: 333, boundingbox: [214, 572, 402, 756] }, \"\\u00CB\" => { wx: 333, boundingbox: [2, -225, 232, 0] }, \"\\u00CD\" => { wx: 333, boundingbox: [157, 593, 565, 734] }, \"\\u00CE\" => { wx: 333, boundingbox: [43, -225, 249, 0] }, \"\\u00CF\" => { wx: 333, boundingbox: [177, 593, 468, 734] }, \"\\u00D0\" => { wx: 1000, boundingbox: [51, 240, 1067, 313] }, \"\\u00E1\" => { wx: 1000, boundingbox: [8, 0, 1097, 718] }, \"\\u00E3\" => { wx: 370, boundingbox: [127, 405, 449, 737] }, \"\\u00E8\" => { wx: 556, boundingbox: [41, 0, 555, 718] }, \"\\u00E9\" => { wx: 778, boundingbox: [43, -19, 890, 737] }, \"\\u00EA\" => { wx: 1000, boundingbox: [98, -19, 1116, 737] }, \"\\u00EB\" => { wx: 365, boundingbox: [141, 405, 468, 737] }, \"\\u00F1\" => { wx: 889, boundingbox: [61, -15, 909, 538] }, \"\\u00F5\" => { wx: 278, boundingbox: [95, 0, 294, 523] }, \"\\u00F8\" => { wx: 222, boundingbox: [41, 0, 347, 718] }, \"\\u00F9\" => { wx: 611, boundingbox: [29, -22, 647, 545] }, \"\\u00FA\" => { wx: 944, boundingbox: [83, -15, 964, 538] }, \"\\u00FB\" => { wx: 611, boundingbox: [67, -15, 658, 728] }, \"\\xFF\" => { wx: 556, boundingbox: [0, 0, 0, 0] } }\n courier_metrics = { ' ' => { wx: 600, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 600, boundingbox: [236, -15, 364, 572] }, '\"' => { wx: 600, boundingbox: [187, 328, 413, 562] }, '#' => { wx: 600, boundingbox: [93, -32, 507, 639] }, '$' => { wx: 600, boundingbox: [105, -126, 496, 662] }, '%' => { wx: 600, boundingbox: [81, -15, 518, 622] }, '&' => { wx: 600, boundingbox: [63, -15, 538, 543] }, \"'\" => { wx: 600, boundingbox: [213, 328, 376, 562] }, '(' => { wx: 600, boundingbox: [269, -108, 440, 622] }, ')' => { wx: 600, boundingbox: [160, -108, 331, 622] }, '*' => { wx: 600, boundingbox: [116, 257, 484, 607] }, '+' => { wx: 600, boundingbox: [80, 44, 520, 470] }, ',' => { wx: 600, boundingbox: [181, -112, 344, 122] }, '-' => { wx: 600, boundingbox: [103, 231, 497, 285] }, '.' => { wx: 600, boundingbox: [229, -15, 371, 109] }, '/' => { wx: 600, boundingbox: [125, -80, 475, 629] }, '0' => { wx: 600, boundingbox: [106, -15, 494, 622] }, '1' => { wx: 600, boundingbox: [96, 0, 505, 622] }, '2' => { wx: 600, boundingbox: [70, 0, 471, 622] }, '3' => { wx: 600, boundingbox: [75, -15, 466, 622] }, '4' => { wx: 600, boundingbox: [78, 0, 500, 622] }, '5' => { wx: 600, boundingbox: [92, -15, 497, 607] }, '6' => { wx: 600, boundingbox: [111, -15, 497, 622] }, '7' => { wx: 600, boundingbox: [82, 0, 483, 607] }, '8' => { wx: 600, boundingbox: [102, -15, 498, 622] }, '9' => { wx: 600, boundingbox: [96, -15, 489, 622] }, ':' => { wx: 600, boundingbox: [229, -15, 371, 385] }, ';' => { wx: 600, boundingbox: [181, -112, 371, 385] }, '<' => { wx: 600, boundingbox: [41, 42, 519, 472] }, '=' => { wx: 600, boundingbox: [80, 138, 520, 376] }, '>' => { wx: 600, boundingbox: [66, 42, 544, 472] }, '?' 
=> { wx: 600, boundingbox: [129, -15, 492, 572] }, '@' => { wx: 600, boundingbox: [77, -15, 533, 622] }, 'A' => { wx: 600, boundingbox: [3, 0, 597, 562] }, 'B' => { wx: 600, boundingbox: [43, 0, 559, 562] }, 'C' => { wx: 600, boundingbox: [41, -18, 540, 580] }, 'D' => { wx: 600, boundingbox: [43, 0, 574, 562] }, 'E' => { wx: 600, boundingbox: [53, 0, 550, 562] }, 'F' => { wx: 600, boundingbox: [53, 0, 545, 562] }, 'G' => { wx: 600, boundingbox: [31, -18, 575, 580] }, 'H' => { wx: 600, boundingbox: [32, 0, 568, 562] }, 'I' => { wx: 600, boundingbox: [96, 0, 504, 562] }, 'J' => { wx: 600, boundingbox: [34, -18, 566, 562] }, 'K' => { wx: 600, boundingbox: [38, 0, 582, 562] }, 'L' => { wx: 600, boundingbox: [47, 0, 554, 562] }, 'M' => { wx: 600, boundingbox: [4, 0, 596, 562] }, 'N' => { wx: 600, boundingbox: [7, -13, 593, 562] }, 'O' => { wx: 600, boundingbox: [43, -18, 557, 580] }, 'P' => { wx: 600, boundingbox: [79, 0, 558, 562] }, 'Q' => { wx: 600, boundingbox: [43, -138, 557, 580] }, 'R' => { wx: 600, boundingbox: [38, 0, 588, 562] }, 'S' => { wx: 600, boundingbox: [72, -20, 529, 580] }, 'T' => { wx: 600, boundingbox: [38, 0, 563, 562] }, 'U' => { wx: 600, boundingbox: [17, -18, 583, 562] }, 'V' => { wx: 600, boundingbox: [-4, -13, 604, 562] }, 'W' => { wx: 600, boundingbox: [-3, -13, 603, 562] }, 'X' => { wx: 600, boundingbox: [23, 0, 577, 562] }, 'Y' => { wx: 600, boundingbox: [24, 0, 576, 562] }, 'Z' => { wx: 600, boundingbox: [86, 0, 514, 562] }, '[' => { wx: 600, boundingbox: [269, -108, 442, 622] }, '\\\\' => { wx: 600, boundingbox: [118, -80, 482, 629] }, ']' => { wx: 600, boundingbox: [158, -108, 331, 622] }, '^' => { wx: 600, boundingbox: [94, 354, 506, 622] }, '_' => { wx: 600, boundingbox: [0, -125, 600, -75] }, '`' => { wx: 600, boundingbox: [224, 328, 387, 562] }, 'a' => { wx: 600, boundingbox: [53, -15, 559, 441] }, 'b' => { wx: 600, boundingbox: [14, -15, 575, 629] }, 'c' => { wx: 600, boundingbox: [66, -15, 529, 441] }, 'd' => { wx: 600, boundingbox: [45, -15, 591, 629] }, 'e' => { wx: 600, boundingbox: [66, -15, 548, 441] }, 'f' => { wx: 600, boundingbox: [114, 0, 531, 629] }, 'g' => { wx: 600, boundingbox: [45, -157, 566, 441] }, 'h' => { wx: 600, boundingbox: [18, 0, 582, 629] }, 'i' => { wx: 600, boundingbox: [95, 0, 505, 657] }, 'j' => { wx: 600, boundingbox: [82, -157, 410, 657] }, 'k' => { wx: 600, boundingbox: [43, 0, 580, 629] }, 'l' => { wx: 600, boundingbox: [95, 0, 505, 629] }, 'm' => { wx: 600, boundingbox: [-5, 0, 605, 441] }, 'n' => { wx: 600, boundingbox: [26, 0, 575, 441] }, 'o' => { wx: 600, boundingbox: [62, -15, 538, 441] }, 'p' => { wx: 600, boundingbox: [9, -157, 555, 441] }, 'q' => { wx: 600, boundingbox: [45, -157, 591, 441] }, 'r' => { wx: 600, boundingbox: [60, 0, 559, 441] }, 's' => { wx: 600, boundingbox: [80, -15, 513, 441] }, 't' => { wx: 600, boundingbox: [87, -15, 530, 561] }, 'u' => { wx: 600, boundingbox: [21, -15, 562, 426] }, 'v' => { wx: 600, boundingbox: [10, -10, 590, 426] }, 'w' => { wx: 600, boundingbox: [-4, -10, 604, 426] }, 'x' => { wx: 600, boundingbox: [20, 0, 580, 426] }, 'y' => { wx: 600, boundingbox: [7, -157, 592, 426] }, 'z' => { wx: 600, boundingbox: [99, 0, 502, 426] }, '{' => { wx: 600, boundingbox: [182, -108, 437, 622] }, '|' => { wx: 600, boundingbox: [275, -250, 326, 750] }, '}' => { wx: 600, boundingbox: [163, -108, 418, 622] }, '~' => { wx: 600, boundingbox: [63, 197, 540, 320] }, \"\\u00A1\" => { wx: 600, boundingbox: [236, -157, 364, 430] }, \"\\u00A2\" => { wx: 600, boundingbox: [96, -49, 500, 614] }, 
\"\\u00A3\" => { wx: 600, boundingbox: [84, -21, 521, 611] }, \"\\u00A4\" => { wx: 600, boundingbox: [92, -57, 509, 665] }, \"\\u00A5\" => { wx: 600, boundingbox: [26, 0, 574, 562] }, \"\\u00A6\" => { wx: 600, boundingbox: [4, -143, 539, 622] }, \"\\u00A7\" => { wx: 600, boundingbox: [113, -78, 488, 580] }, \"\\u00A8\" => { wx: 600, boundingbox: [73, 58, 527, 506] }, \"\\u00A9\" => { wx: 600, boundingbox: [259, 328, 341, 562] }, \"\\u00AA\" => { wx: 600, boundingbox: [143, 328, 471, 562] }, \"\\u00AB\" => { wx: 600, boundingbox: [37, 70, 563, 446] }, \"\\u00AC\" => { wx: 600, boundingbox: [149, 70, 451, 446] }, \"\\u00AD\" => { wx: 600, boundingbox: [149, 70, 451, 446] }, \"\\u00AE\" => { wx: 600, boundingbox: [3, 0, 597, 629] }, \"\\u00AF\" => { wx: 600, boundingbox: [3, 0, 597, 629] }, \"\\u00B1\" => { wx: 600, boundingbox: [75, 231, 525, 285] }, \"\\u00B2\" => { wx: 600, boundingbox: [141, -78, 459, 580] }, \"\\u00B3\" => { wx: 600, boundingbox: [141, -78, 459, 580] }, \"\\u00B4\" => { wx: 600, boundingbox: [222, 189, 378, 327] }, \"\\u00B6\" => { wx: 600, boundingbox: [50, -78, 511, 562] }, \"\\u00B7\" => { wx: 600, boundingbox: [172, 130, 428, 383] }, \"\\u00B8\" => { wx: 600, boundingbox: [213, -134, 376, 100] }, \"\\u00B9\" => { wx: 600, boundingbox: [143, -134, 457, 100] }, \"\\u00BA\" => { wx: 600, boundingbox: [143, 328, 457, 562] }, \"\\u00BB\" => { wx: 600, boundingbox: [37, 70, 563, 446] }, \"\\u00BC\" => { wx: 600, boundingbox: [37, -15, 563, 111] }, \"\\u00BD\" => { wx: 600, boundingbox: [3, -15, 600, 622] }, \"\\u00BF\" => { wx: 600, boundingbox: [108, -157, 471, 430] }, \"\\u00C1\" => { wx: 600, boundingbox: [151, 497, 378, 672] }, \"\\u00C2\" => { wx: 600, boundingbox: [242, 497, 469, 672] }, \"\\u00C3\" => { wx: 600, boundingbox: [124, 477, 476, 654] }, \"\\u00C4\" => { wx: 600, boundingbox: [105, 489, 503, 606] }, \"\\u00C5\" => { wx: 600, boundingbox: [120, 525, 480, 565] }, \"\\u00C6\" => { wx: 600, boundingbox: [153, 501, 447, 609] }, \"\\u00C7\" => { wx: 600, boundingbox: [249, 537, 352, 640] }, \"\\u00C8\" => { wx: 600, boundingbox: [148, 537, 453, 640] }, \"\\u00CA\" => { wx: 600, boundingbox: [218, 463, 382, 627] }, \"\\u00CB\" => { wx: 600, boundingbox: [224, -151, 362, 10] }, \"\\u00CD\" => { wx: 600, boundingbox: [133, 497, 540, 672] }, \"\\u00CE\" => { wx: 600, boundingbox: [211, -172, 407, 4] }, \"\\u00CF\" => { wx: 600, boundingbox: [124, 492, 476, 669] }, \"\\u00D0\" => { wx: 600, boundingbox: [0, 231, 600, 285] }, \"\\u00E1\" => { wx: 600, boundingbox: [3, 0, 550, 562] }, \"\\u00E3\" => { wx: 600, boundingbox: [156, 249, 442, 580] }, \"\\u00E8\" => { wx: 600, boundingbox: [47, 0, 554, 562] }, \"\\u00E9\" => { wx: 600, boundingbox: [43, -80, 557, 629] }, \"\\u00EA\" => { wx: 600, boundingbox: [7, 0, 567, 562] }, \"\\u00EB\" => { wx: 600, boundingbox: [157, 249, 443, 580] }, \"\\u00F1\" => { wx: 600, boundingbox: [19, -15, 570, 441] }, \"\\u00F5\" => { wx: 600, boundingbox: [95, 0, 505, 426] }, \"\\u00F8\" => { wx: 600, boundingbox: [95, 0, 505, 629] }, \"\\u00F9\" => { wx: 600, boundingbox: [62, -80, 538, 506] }, \"\\u00FA\" => { wx: 600, boundingbox: [19, -15, 559, 441] }, \"\\u00FB\" => { wx: 600, boundingbox: [48, -15, 588, 629] }, \"\\xFF\" => { wx: 600, boundingbox: [0, 0, 0, 0] } }\n courier_bold_metrics = { ' ' => { wx: 600, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 600, boundingbox: [202, -15, 398, 572] }, '\"' => { wx: 600, boundingbox: [135, 277, 465, 562] }, '#' => { wx: 600, boundingbox: [56, -45, 544, 651] }, '$' => { wx: 600, boundingbox: [82, -126, 519, 666] }, '%' => { wx: 600, boundingbox: [5, -15, 595, 616] }, '&' => { wx: 600, boundingbox: [36, -15, 546, 543] }, \"'\" => { wx: 600, boundingbox: [171, 277, 423, 562] }, '(' => { wx: 600, boundingbox: [219, -102, 461, 616] }, ')' => { wx: 600, boundingbox: [139, -102, 381, 616] }, '*' => { wx: 600, boundingbox: [91, 219, 509, 601] }, '+' => { wx: 600, boundingbox: [71, 39, 529, 478] }, ',' => { wx: 600, boundingbox: [123, -111, 393, 174] }, '-' => { wx: 600, boundingbox: [100, 203, 500, 313] }, '.' => { wx: 600, boundingbox: [192, -15, 408, 171] }, '/' => { wx: 600, boundingbox: [98, -77, 502, 626] }, '0' => { wx: 600, boundingbox: [87, -15, 513, 616] }, '1' => { wx: 600, boundingbox: [81, 0, 539, 616] }, '2' => { wx: 600, boundingbox: [61, 0, 499, 616] }, '3' => { wx: 600, boundingbox: [63, -15, 501, 616] }, '4' => { wx: 600, boundingbox: [53, 0, 507, 616] }, '5' => { wx: 600, boundingbox: [70, -15, 521, 601] }, '6' => { wx: 600, boundingbox: [90, -15, 521, 616] }, '7' => { wx: 600, boundingbox: [55, 0, 494, 601] }, '8' => { wx: 600, boundingbox: [83, -15, 517, 616] }, '9' => { wx: 600, boundingbox: [79, -15, 510, 616] }, ':' => { wx: 600, boundingbox: [191, -15, 407, 425] }, ';' => { wx: 600, boundingbox: [123, -111, 408, 425] }, '<' => { wx: 600, boundingbox: [66, 15, 523, 501] }, '=' => { wx: 600, boundingbox: [71, 118, 529, 398] }, '>' => { wx: 600, boundingbox: [77, 15, 534, 501] }, '?' => { wx: 600, boundingbox: [98, -14, 501, 580] }, '@' => { wx: 600, boundingbox: [16, -15, 584, 616] }, 'A' => { wx: 600, boundingbox: [-9, 0, 609, 562] }, 'B' => { wx: 600, boundingbox: [30, 0, 573, 562] }, 'C' => { wx: 600, boundingbox: [22, -18, 560, 580] }, 'D' => { wx: 600, boundingbox: [30, 0, 594, 562] }, 'E' => { wx: 600, boundingbox: [25, 0, 560, 562] }, 'F' => { wx: 600, boundingbox: [39, 0, 570, 562] }, 'G' => { wx: 600, boundingbox: [22, -18, 594, 580] }, 'H' => { wx: 600, boundingbox: [20, 0, 580, 562] }, 'I' => { wx: 600, boundingbox: [77, 0, 523, 562] }, 'J' => { wx: 600, boundingbox: [37, -18, 601, 562] }, 'K' => { wx: 600, boundingbox: [21, 0, 599, 562] }, 'L' => { wx: 600, boundingbox: [39, 0, 578, 562] }, 'M' => { wx: 600, boundingbox: [-2, 0, 602, 562] }, 'N' => { wx: 600, boundingbox: [8, -12, 610, 562] }, 'O' => { wx: 600, boundingbox: [22, -18, 578, 580] }, 'P' => { wx: 600, boundingbox: [48, 0, 559, 562] }, 'Q' => { wx: 600, boundingbox: [32, -138, 578, 580] }, 'R' => { wx: 600, boundingbox: [24, 0, 599, 562] }, 'S' => { wx: 600, boundingbox: [47, -22, 553, 582] }, 'T' => { wx: 600, boundingbox: [21, 0, 579, 562] }, 'U' => { wx: 600, boundingbox: [4, -18, 596, 562] }, 'V' => { wx: 600, boundingbox: [-13, 0, 613, 562] }, 'W' => { wx: 600, boundingbox: [-18, 0, 618, 562] }, 'X' => { wx: 600, boundingbox: [12, 0, 588, 562] }, 'Y' => { wx: 600, boundingbox: [12, 0, 589, 562] }, 'Z' => { wx: 600, boundingbox: [62, 0, 539, 562] }, '[' => { wx: 600, boundingbox: [245, -102, 475, 616] }, '\\\\' => { wx: 600, boundingbox: [99, -77, 503, 626] }, ']' => { wx: 600, boundingbox: [125, -102, 355, 616] }, '^' => { wx: 600, boundingbox: [108, 250, 492, 616] }, '_' => { wx: 600, boundingbox: [0, -125, 600, -75] }, '`' => { wx: 600, boundingbox: [178, 277, 428, 562] }, 'a' => { wx: 600, boundingbox: [35, -15, 570, 454] }, 'b' => { wx: 600, boundingbox: [0, -15, 584, 626] }, 'c' => { wx: 
600, boundingbox: [40, -15, 545, 459] }, 'd' => { wx: 600, boundingbox: [20, -15, 591, 626] }, 'e' => { wx: 600, boundingbox: [40, -15, 563, 454] }, 'f' => { wx: 600, boundingbox: [83, 0, 547, 626] }, 'g' => { wx: 600, boundingbox: [30, -146, 580, 454] }, 'h' => { wx: 600, boundingbox: [5, 0, 592, 626] }, 'i' => { wx: 600, boundingbox: [77, 0, 523, 658] }, 'j' => { wx: 600, boundingbox: [63, -146, 440, 658] }, 'k' => { wx: 600, boundingbox: [20, 0, 585, 626] }, 'l' => { wx: 600, boundingbox: [77, 0, 523, 626] }, 'm' => { wx: 600, boundingbox: [-22, 0, 626, 454] }, 'n' => { wx: 600, boundingbox: [18, 0, 592, 454] }, 'o' => { wx: 600, boundingbox: [30, -15, 570, 454] }, 'p' => { wx: 600, boundingbox: [-1, -142, 570, 454] }, 'q' => { wx: 600, boundingbox: [20, -142, 591, 454] }, 'r' => { wx: 600, boundingbox: [47, 0, 580, 454] }, 's' => { wx: 600, boundingbox: [68, -17, 535, 459] }, 't' => { wx: 600, boundingbox: [47, -15, 532, 562] }, 'u' => { wx: 600, boundingbox: [-1, -15, 569, 439] }, 'v' => { wx: 600, boundingbox: [-1, 0, 601, 439] }, 'w' => { wx: 600, boundingbox: [-18, 0, 618, 439] }, 'x' => { wx: 600, boundingbox: [6, 0, 594, 439] }, 'y' => { wx: 600, boundingbox: [-4, -142, 601, 439] }, 'z' => { wx: 600, boundingbox: [81, 0, 520, 439] }, '{' => { wx: 600, boundingbox: [160, -102, 464, 616] }, '|' => { wx: 600, boundingbox: [255, -250, 345, 750] }, '}' => { wx: 600, boundingbox: [136, -102, 440, 616] }, '~' => { wx: 600, boundingbox: [71, 153, 530, 356] }, \"\\u00A1\" => { wx: 600, boundingbox: [202, -146, 398, 449] }, \"\\u00A2\" => { wx: 600, boundingbox: [66, -49, 518, 614] }, \"\\u00A3\" => { wx: 600, boundingbox: [72, -28, 558, 611] }, \"\\u00A4\" => { wx: 600, boundingbox: [25, -60, 576, 661] }, \"\\u00A5\" => { wx: 600, boundingbox: [10, 0, 590, 562] }, \"\\u00A6\" => { wx: 600, boundingbox: [-30, -131, 572, 616] }, \"\\u00A7\" => { wx: 600, boundingbox: [83, -70, 517, 580] }, \"\\u00A8\" => { wx: 600, boundingbox: [54, 49, 546, 517] }, \"\\u00A9\" => { wx: 600, boundingbox: [227, 277, 373, 562] }, \"\\u00AA\" => { wx: 600, boundingbox: [71, 277, 535, 562] }, \"\\u00AB\" => { wx: 600, boundingbox: [8, 70, 553, 446] }, \"\\u00AC\" => { wx: 600, boundingbox: [141, 70, 459, 446] }, \"\\u00AD\" => { wx: 600, boundingbox: [141, 70, 459, 446] }, \"\\u00AE\" => { wx: 600, boundingbox: [12, 0, 593, 626] }, \"\\u00AF\" => { wx: 600, boundingbox: [12, 0, 593, 626] }, \"\\u00B1\" => { wx: 600, boundingbox: [65, 203, 535, 313] }, \"\\u00B2\" => { wx: 600, boundingbox: [106, -70, 494, 580] }, \"\\u00B3\" => { wx: 600, boundingbox: [106, -70, 494, 580] }, \"\\u00B4\" => { wx: 600, boundingbox: [196, 165, 404, 351] }, \"\\u00B6\" => { wx: 600, boundingbox: [6, -70, 576, 580] }, \"\\u00B7\" => { wx: 600, boundingbox: [140, 132, 460, 430] }, \"\\u00B8\" => { wx: 600, boundingbox: [175, -142, 427, 143] }, \"\\u00B9\" => { wx: 600, boundingbox: [65, -142, 529, 143] }, \"\\u00BA\" => { wx: 600, boundingbox: [61, 277, 525, 562] }, \"\\u00BB\" => { wx: 600, boundingbox: [47, 70, 592, 446] }, \"\\u00BC\" => { wx: 600, boundingbox: [26, -15, 574, 116] }, \"\\u00BD\" => { wx: 600, boundingbox: [-113, -15, 713, 616] }, \"\\u00BF\" => { wx: 600, boundingbox: [99, -146, 502, 449] }, \"\\u00C1\" => { wx: 600, boundingbox: [132, 508, 395, 661] }, \"\\u00C2\" => { wx: 600, boundingbox: [205, 508, 468, 661] }, \"\\u00C3\" => { wx: 600, boundingbox: [103, 483, 497, 657] }, \"\\u00C4\" => { wx: 600, boundingbox: [89, 493, 512, 636] }, \"\\u00C5\" => { wx: 600, boundingbox: [88, 505, 512, 585] }, \"\\u00C6\" => 
{ wx: 600, boundingbox: [83, 468, 517, 631] }, \"\\u00C7\" => { wx: 600, boundingbox: [230, 498, 370, 638] }, \"\\u00C8\" => { wx: 600, boundingbox: [128, 498, 472, 638] }, \"\\u00CA\" => { wx: 600, boundingbox: [198, 481, 402, 678] }, \"\\u00CB\" => { wx: 600, boundingbox: [205, -206, 387, 0] }, \"\\u00CD\" => { wx: 600, boundingbox: [68, 488, 588, 661] }, \"\\u00CE\" => { wx: 600, boundingbox: [169, -199, 400, 0] }, \"\\u00CF\" => { wx: 600, boundingbox: [103, 493, 497, 667] }, \"\\u00D0\" => { wx: 600, boundingbox: [-10, 203, 610, 313] }, \"\\u00E1\" => { wx: 600, boundingbox: [-29, 0, 602, 562] }, \"\\u00E3\" => { wx: 600, boundingbox: [147, 196, 453, 580] }, \"\\u00E8\" => { wx: 600, boundingbox: [39, 0, 578, 562] }, \"\\u00E9\" => { wx: 600, boundingbox: [22, -22, 578, 584] }, \"\\u00EA\" => { wx: 600, boundingbox: [-25, 0, 595, 562] }, \"\\u00EB\" => { wx: 600, boundingbox: [147, 196, 453, 580] }, \"\\u00F1\" => { wx: 600, boundingbox: [-4, -15, 601, 454] }, \"\\u00F5\" => { wx: 600, boundingbox: [77, 0, 523, 439] }, \"\\u00F8\" => { wx: 600, boundingbox: [77, 0, 523, 626] }, \"\\u00F9\" => { wx: 600, boundingbox: [30, -24, 570, 463] }, \"\\u00FA\" => { wx: 600, boundingbox: [-18, -15, 611, 454] }, \"\\u00FB\" => { wx: 600, boundingbox: [22, -15, 596, 626] }, \"\\xFF\" => { wx: 600, boundingbox: [0, 0, 0, 0] } }\n courier_oblique_metrics = { ' ' => { wx: 600, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 600, boundingbox: [243, -15, 464, 572] }, '\"' => { wx: 600, boundingbox: [273, 328, 532, 562] }, '#' => { wx: 600, boundingbox: [133, -32, 596, 639] }, '$' => { wx: 600, boundingbox: [108, -126, 596, 662] }, '%' => { wx: 600, boundingbox: [134, -15, 599, 622] }, '&' => { wx: 600, boundingbox: [87, -15, 580, 543] }, \"'\" => { wx: 600, boundingbox: [283, 328, 495, 562] }, '(' => { wx: 600, boundingbox: [313, -108, 572, 622] }, ')' => { wx: 600, boundingbox: [137, -108, 396, 622] }, '*' => { wx: 600, boundingbox: [212, 257, 580, 607] }, '+' => { wx: 600, boundingbox: [129, 44, 580, 470] }, ',' => { wx: 600, boundingbox: [157, -112, 370, 122] }, '-' => { wx: 600, boundingbox: [152, 231, 558, 285] }, '.' => { wx: 600, boundingbox: [238, -15, 382, 109] }, '/' => { wx: 600, boundingbox: [112, -80, 604, 629] }, '0' => { wx: 600, boundingbox: [154, -15, 575, 622] }, '1' => { wx: 600, boundingbox: [98, 0, 515, 622] }, '2' => { wx: 600, boundingbox: [70, 0, 568, 622] }, '3' => { wx: 600, boundingbox: [82, -15, 538, 622] }, '4' => { wx: 600, boundingbox: [108, 0, 541, 622] }, '5' => { wx: 600, boundingbox: [99, -15, 589, 607] }, '6' => { wx: 600, boundingbox: [155, -15, 629, 622] }, '7' => { wx: 600, boundingbox: [182, 0, 612, 607] }, '8' => { wx: 600, boundingbox: [132, -15, 588, 622] }, '9' => { wx: 600, boundingbox: [93, -15, 574, 622] }, ':' => { wx: 600, boundingbox: [238, -15, 441, 385] }, ';' => { wx: 600, boundingbox: [157, -112, 441, 385] }, '<' => { wx: 600, boundingbox: [96, 42, 610, 472] }, '=' => { wx: 600, boundingbox: [109, 138, 600, 376] }, '>' => { wx: 600, boundingbox: [85, 42, 599, 472] }, '?' 
=> { wx: 600, boundingbox: [222, -15, 583, 572] }, '@' => { wx: 600, boundingbox: [127, -15, 582, 622] }, 'A' => { wx: 600, boundingbox: [3, 0, 607, 562] }, 'B' => { wx: 600, boundingbox: [43, 0, 616, 562] }, 'C' => { wx: 600, boundingbox: [93, -18, 655, 580] }, 'D' => { wx: 600, boundingbox: [43, 0, 645, 562] }, 'E' => { wx: 600, boundingbox: [53, 0, 660, 562] }, 'F' => { wx: 600, boundingbox: [53, 0, 660, 562] }, 'G' => { wx: 600, boundingbox: [83, -18, 645, 580] }, 'H' => { wx: 600, boundingbox: [32, 0, 687, 562] }, 'I' => { wx: 600, boundingbox: [96, 0, 623, 562] }, 'J' => { wx: 600, boundingbox: [52, -18, 685, 562] }, 'K' => { wx: 600, boundingbox: [38, 0, 671, 562] }, 'L' => { wx: 600, boundingbox: [47, 0, 607, 562] }, 'M' => { wx: 600, boundingbox: [4, 0, 715, 562] }, 'N' => { wx: 600, boundingbox: [7, -13, 712, 562] }, 'O' => { wx: 600, boundingbox: [94, -18, 625, 580] }, 'P' => { wx: 600, boundingbox: [79, 0, 644, 562] }, 'Q' => { wx: 600, boundingbox: [95, -138, 625, 580] }, 'R' => { wx: 600, boundingbox: [38, 0, 598, 562] }, 'S' => { wx: 600, boundingbox: [76, -20, 650, 580] }, 'T' => { wx: 600, boundingbox: [108, 0, 665, 562] }, 'U' => { wx: 600, boundingbox: [125, -18, 702, 562] }, 'V' => { wx: 600, boundingbox: [105, -13, 723, 562] }, 'W' => { wx: 600, boundingbox: [106, -13, 722, 562] }, 'X' => { wx: 600, boundingbox: [23, 0, 675, 562] }, 'Y' => { wx: 600, boundingbox: [133, 0, 695, 562] }, 'Z' => { wx: 600, boundingbox: [86, 0, 610, 562] }, '[' => { wx: 600, boundingbox: [246, -108, 574, 622] }, '\\\\' => { wx: 600, boundingbox: [249, -80, 468, 629] }, ']' => { wx: 600, boundingbox: [135, -108, 463, 622] }, '^' => { wx: 600, boundingbox: [175, 354, 587, 622] }, '_' => { wx: 600, boundingbox: [-27, -125, 584, -75] }, '`' => { wx: 600, boundingbox: [343, 328, 457, 562] }, 'a' => { wx: 600, boundingbox: [76, -15, 569, 441] }, 'b' => { wx: 600, boundingbox: [29, -15, 625, 629] }, 'c' => { wx: 600, boundingbox: [106, -15, 608, 441] }, 'd' => { wx: 600, boundingbox: [85, -15, 640, 629] }, 'e' => { wx: 600, boundingbox: [106, -15, 598, 441] }, 'f' => { wx: 600, boundingbox: [114, 0, 662, 629] }, 'g' => { wx: 600, boundingbox: [61, -157, 657, 441] }, 'h' => { wx: 600, boundingbox: [33, 0, 592, 629] }, 'i' => { wx: 600, boundingbox: [95, 0, 515, 657] }, 'j' => { wx: 600, boundingbox: [52, -157, 550, 657] }, 'k' => { wx: 600, boundingbox: [58, 0, 633, 629] }, 'l' => { wx: 600, boundingbox: [95, 0, 515, 629] }, 'm' => { wx: 600, boundingbox: [-5, 0, 615, 441] }, 'n' => { wx: 600, boundingbox: [26, 0, 585, 441] }, 'o' => { wx: 600, boundingbox: [102, -15, 588, 441] }, 'p' => { wx: 600, boundingbox: [-24, -157, 605, 441] }, 'q' => { wx: 600, boundingbox: [85, -157, 682, 441] }, 'r' => { wx: 600, boundingbox: [60, 0, 636, 441] }, 's' => { wx: 600, boundingbox: [78, -15, 584, 441] }, 't' => { wx: 600, boundingbox: [167, -15, 561, 561] }, 'u' => { wx: 600, boundingbox: [101, -15, 572, 426] }, 'v' => { wx: 600, boundingbox: [90, -10, 681, 426] }, 'w' => { wx: 600, boundingbox: [76, -10, 695, 426] }, 'x' => { wx: 600, boundingbox: [20, 0, 655, 426] }, 'y' => { wx: 600, boundingbox: [-4, -157, 683, 426] }, 'z' => { wx: 600, boundingbox: [99, 0, 593, 426] }, '{' => { wx: 600, boundingbox: [233, -108, 569, 622] }, '|' => { wx: 600, boundingbox: [222, -250, 485, 750] }, '}' => { wx: 600, boundingbox: [140, -108, 477, 622] }, '~' => { wx: 600, boundingbox: [116, 197, 600, 320] }, \"\\u00A1\" => { wx: 600, boundingbox: [225, -157, 445, 430] }, \"\\u00A2\" => { wx: 600, boundingbox: [151, -49, 588, 
614] }, \"\\u00A3\" => { wx: 600, boundingbox: [124, -21, 621, 611] }, \"\\u00A4\" => { wx: 600, boundingbox: [84, -57, 646, 665] }, \"\\u00A5\" => { wx: 600, boundingbox: [120, 0, 693, 562] }, \"\\u00A6\" => { wx: 600, boundingbox: [-26, -143, 671, 622] }, \"\\u00A7\" => { wx: 600, boundingbox: [104, -78, 590, 580] }, \"\\u00A8\" => { wx: 600, boundingbox: [94, 58, 628, 506] }, \"\\u00A9\" => { wx: 600, boundingbox: [345, 328, 460, 562] }, \"\\u00AA\" => { wx: 600, boundingbox: [262, 328, 541, 562] }, \"\\u00AB\" => { wx: 600, boundingbox: [92, 70, 652, 446] }, \"\\u00AC\" => { wx: 600, boundingbox: [204, 70, 540, 446] }, \"\\u00AD\" => { wx: 600, boundingbox: [170, 70, 506, 446] }, \"\\u00AE\" => { wx: 600, boundingbox: [3, 0, 619, 629] }, \"\\u00AF\" => { wx: 600, boundingbox: [3, 0, 619, 629] }, \"\\u00B1\" => { wx: 600, boundingbox: [124, 231, 586, 285] }, \"\\u00B2\" => { wx: 600, boundingbox: [217, -78, 546, 580] }, \"\\u00B3\" => { wx: 600, boundingbox: [163, -78, 546, 580] }, \"\\u00B4\" => { wx: 600, boundingbox: [275, 189, 434, 327] }, \"\\u00B6\" => { wx: 600, boundingbox: [100, -78, 630, 562] }, \"\\u00B7\" => { wx: 600, boundingbox: [224, 130, 485, 383] }, \"\\u00B8\" => { wx: 600, boundingbox: [185, -134, 397, 100] }, \"\\u00B9\" => { wx: 600, boundingbox: [115, -134, 478, 100] }, \"\\u00BA\" => { wx: 600, boundingbox: [213, 328, 576, 562] }, \"\\u00BB\" => { wx: 600, boundingbox: [58, 70, 618, 446] }, \"\\u00BC\" => { wx: 600, boundingbox: [46, -15, 575, 111] }, \"\\u00BD\" => { wx: 600, boundingbox: [59, -15, 627, 622] }, \"\\u00BF\" => { wx: 600, boundingbox: [105, -157, 466, 430] }, \"\\u00C1\" => { wx: 600, boundingbox: [294, 497, 484, 672] }, \"\\u00C2\" => { wx: 600, boundingbox: [348, 497, 612, 672] }, \"\\u00C3\" => { wx: 600, boundingbox: [229, 477, 581, 654] }, \"\\u00C4\" => { wx: 600, boundingbox: [212, 489, 629, 606] }, \"\\u00C5\" => { wx: 600, boundingbox: [232, 525, 600, 565] }, \"\\u00C6\" => { wx: 600, boundingbox: [279, 501, 576, 609] }, \"\\u00C7\" => { wx: 600, boundingbox: [373, 537, 478, 640] }, \"\\u00C8\" => { wx: 600, boundingbox: [272, 537, 579, 640] }, \"\\u00CA\" => { wx: 600, boundingbox: [332, 463, 500, 627] }, \"\\u00CB\" => { wx: 600, boundingbox: [197, -151, 344, 10] }, \"\\u00CD\" => { wx: 600, boundingbox: [239, 497, 683, 672] }, \"\\u00CE\" => { wx: 600, boundingbox: [189, -172, 377, 4] }, \"\\u00CF\" => { wx: 600, boundingbox: [262, 492, 614, 669] }, \"\\u00D0\" => { wx: 600, boundingbox: [49, 231, 661, 285] }, \"\\u00E1\" => { wx: 600, boundingbox: [3, 0, 655, 562] }, \"\\u00E3\" => { wx: 600, boundingbox: [209, 249, 512, 580] }, \"\\u00E8\" => { wx: 600, boundingbox: [47, 0, 607, 562] }, \"\\u00E9\" => { wx: 600, boundingbox: [94, -80, 625, 629] }, \"\\u00EA\" => { wx: 600, boundingbox: [59, 0, 672, 562] }, \"\\u00EB\" => { wx: 600, boundingbox: [210, 249, 535, 580] }, \"\\u00F1\" => { wx: 600, boundingbox: [41, -15, 626, 441] }, \"\\u00F5\" => { wx: 600, boundingbox: [95, 0, 515, 426] }, \"\\u00F8\" => { wx: 600, boundingbox: [95, 0, 587, 629] }, \"\\u00F9\" => { wx: 600, boundingbox: [102, -80, 588, 506] }, \"\\u00FA\" => { wx: 600, boundingbox: [54, -15, 615, 441] }, \"\\u00FB\" => { wx: 600, boundingbox: [48, -15, 617, 629] }, \"\\xFF\" => { wx: 600, boundingbox: [0, 0, 0, 0] } }\n courier_boldoblique_metrics = { ' ' => { wx: 600, boundingbox: [0, 0, 0, 0] }, '!' 
=> { wx: 600, boundingbox: [215, -15, 495, 572] }, '\"' => { wx: 600, boundingbox: [211, 277, 585, 562] }, '#' => { wx: 600, boundingbox: [88, -45, 641, 651] }, '$' => { wx: 600, boundingbox: [87, -126, 630, 666] }, '%' => { wx: 600, boundingbox: [101, -15, 625, 616] }, '&' => { wx: 600, boundingbox: [61, -15, 595, 543] }, \"'\" => { wx: 600, boundingbox: [229, 277, 543, 562] }, '(' => { wx: 600, boundingbox: [265, -102, 592, 616] }, ')' => { wx: 600, boundingbox: [117, -102, 444, 616] }, '*' => { wx: 600, boundingbox: [179, 219, 598, 601] }, '+' => { wx: 600, boundingbox: [114, 39, 596, 478] }, ',' => { wx: 600, boundingbox: [99, -111, 430, 174] }, '-' => { wx: 600, boundingbox: [143, 203, 567, 313] }, '.' => { wx: 600, boundingbox: [206, -15, 427, 171] }, '/' => { wx: 600, boundingbox: [90, -77, 626, 626] }, '0' => { wx: 600, boundingbox: [135, -15, 593, 616] }, '1' => { wx: 600, boundingbox: [93, 0, 562, 616] }, '2' => { wx: 600, boundingbox: [61, 0, 594, 616] }, '3' => { wx: 600, boundingbox: [71, -15, 571, 616] }, '4' => { wx: 600, boundingbox: [81, 0, 559, 616] }, '5' => { wx: 600, boundingbox: [77, -15, 621, 601] }, '6' => { wx: 600, boundingbox: [135, -15, 652, 616] }, '7' => { wx: 600, boundingbox: [147, 0, 622, 601] }, '8' => { wx: 600, boundingbox: [115, -15, 604, 616] }, '9' => { wx: 600, boundingbox: [75, -15, 592, 616] }, ':' => { wx: 600, boundingbox: [205, -15, 480, 425] }, ';' => { wx: 600, boundingbox: [99, -111, 481, 425] }, '<' => { wx: 600, boundingbox: [120, 15, 613, 501] }, '=' => { wx: 600, boundingbox: [96, 118, 614, 398] }, '>' => { wx: 600, boundingbox: [97, 15, 589, 501] }, '?' => { wx: 600, boundingbox: [183, -14, 592, 580] }, '@' => { wx: 600, boundingbox: [65, -15, 642, 616] }, 'A' => { wx: 600, boundingbox: [-9, 0, 632, 562] }, 'B' => { wx: 600, boundingbox: [30, 0, 630, 562] }, 'C' => { wx: 600, boundingbox: [74, -18, 675, 580] }, 'D' => { wx: 600, boundingbox: [30, 0, 664, 562] }, 'E' => { wx: 600, boundingbox: [25, 0, 670, 562] }, 'F' => { wx: 600, boundingbox: [39, 0, 684, 562] }, 'G' => { wx: 600, boundingbox: [74, -18, 675, 580] }, 'H' => { wx: 600, boundingbox: [20, 0, 700, 562] }, 'I' => { wx: 600, boundingbox: [77, 0, 643, 562] }, 'J' => { wx: 600, boundingbox: [58, -18, 721, 562] }, 'K' => { wx: 600, boundingbox: [21, 0, 692, 562] }, 'L' => { wx: 600, boundingbox: [39, 0, 636, 562] }, 'M' => { wx: 600, boundingbox: [-2, 0, 722, 562] }, 'N' => { wx: 600, boundingbox: [8, -12, 730, 562] }, 'O' => { wx: 600, boundingbox: [74, -18, 645, 580] }, 'P' => { wx: 600, boundingbox: [48, 0, 643, 562] }, 'Q' => { wx: 600, boundingbox: [83, -138, 636, 580] }, 'R' => { wx: 600, boundingbox: [24, 0, 617, 562] }, 'S' => { wx: 600, boundingbox: [54, -22, 673, 582] }, 'T' => { wx: 600, boundingbox: [86, 0, 679, 562] }, 'U' => { wx: 600, boundingbox: [101, -18, 716, 562] }, 'V' => { wx: 600, boundingbox: [84, 0, 733, 562] }, 'W' => { wx: 600, boundingbox: [79, 0, 738, 562] }, 'X' => { wx: 600, boundingbox: [12, 0, 690, 562] }, 'Y' => { wx: 600, boundingbox: [109, 0, 709, 562] }, 'Z' => { wx: 600, boundingbox: [62, 0, 637, 562] }, '[' => { wx: 600, boundingbox: [223, -102, 606, 616] }, '\\\\' => { wx: 600, boundingbox: [222, -77, 496, 626] }, ']' => { wx: 600, boundingbox: [103, -102, 486, 616] }, '^' => { wx: 600, boundingbox: [171, 250, 556, 616] }, '_' => { wx: 600, boundingbox: [-27, -125, 585, -75] }, '`' => { wx: 600, boundingbox: [297, 277, 487, 562] }, 'a' => { wx: 600, boundingbox: [61, -15, 593, 454] }, 'b' => { wx: 600, boundingbox: [13, -15, 636, 626] }, 
'c' => { wx: 600, boundingbox: [81, -15, 631, 459] }, 'd' => { wx: 600, boundingbox: [60, -15, 645, 626] }, 'e' => { wx: 600, boundingbox: [81, -15, 605, 454] }, 'f' => { wx: 600, boundingbox: [83, 0, 677, 626] }, 'g' => { wx: 600, boundingbox: [40, -146, 674, 454] }, 'h' => { wx: 600, boundingbox: [18, 0, 615, 626] }, 'i' => { wx: 600, boundingbox: [77, 0, 546, 658] }, 'j' => { wx: 600, boundingbox: [36, -146, 580, 658] }, 'k' => { wx: 600, boundingbox: [33, 0, 643, 626] }, 'l' => { wx: 600, boundingbox: [77, 0, 546, 626] }, 'm' => { wx: 600, boundingbox: [-22, 0, 649, 454] }, 'n' => { wx: 600, boundingbox: [18, 0, 615, 454] }, 'o' => { wx: 600, boundingbox: [71, -15, 622, 454] }, 'p' => { wx: 600, boundingbox: [-32, -142, 622, 454] }, 'q' => { wx: 600, boundingbox: [60, -142, 685, 454] }, 'r' => { wx: 600, boundingbox: [47, 0, 655, 454] }, 's' => { wx: 600, boundingbox: [66, -17, 608, 459] }, 't' => { wx: 600, boundingbox: [118, -15, 567, 562] }, 'u' => { wx: 600, boundingbox: [70, -15, 592, 439] }, 'v' => { wx: 600, boundingbox: [70, 0, 695, 439] }, 'w' => { wx: 600, boundingbox: [53, 0, 712, 439] }, 'x' => { wx: 600, boundingbox: [6, 0, 671, 439] }, 'y' => { wx: 600, boundingbox: [-21, -142, 695, 439] }, 'z' => { wx: 600, boundingbox: [81, 0, 614, 439] }, '{' => { wx: 600, boundingbox: [203, -102, 595, 616] }, '|' => { wx: 600, boundingbox: [201, -250, 505, 750] }, '}' => { wx: 600, boundingbox: [114, -102, 506, 616] }, '~' => { wx: 600, boundingbox: [120, 153, 590, 356] }, \"\\u00A1\" => { wx: 600, boundingbox: [196, -146, 477, 449] }, \"\\u00A2\" => { wx: 600, boundingbox: [121, -49, 605, 614] }, \"\\u00A3\" => { wx: 600, boundingbox: [106, -28, 650, 611] }, \"\\u00A4\" => { wx: 600, boundingbox: [22, -60, 708, 661] }, \"\\u00A5\" => { wx: 600, boundingbox: [98, 0, 710, 562] }, \"\\u00A6\" => { wx: 600, boundingbox: [-57, -131, 702, 616] }, \"\\u00A7\" => { wx: 600, boundingbox: [74, -70, 620, 580] }, \"\\u00A8\" => { wx: 600, boundingbox: [77, 49, 644, 517] }, \"\\u00A9\" => { wx: 600, boundingbox: [303, 277, 493, 562] }, \"\\u00AA\" => { wx: 600, boundingbox: [190, 277, 594, 562] }, \"\\u00AB\" => { wx: 600, boundingbox: [62, 70, 639, 446] }, \"\\u00AC\" => { wx: 600, boundingbox: [195, 70, 545, 446] }, \"\\u00AD\" => { wx: 600, boundingbox: [165, 70, 514, 446] }, \"\\u00AE\" => { wx: 600, boundingbox: [12, 0, 644, 626] }, \"\\u00AF\" => { wx: 600, boundingbox: [12, 0, 644, 626] }, \"\\u00B1\" => { wx: 600, boundingbox: [108, 203, 602, 313] }, \"\\u00B2\" => { wx: 600, boundingbox: [175, -70, 586, 580] }, \"\\u00B3\" => { wx: 600, boundingbox: [121, -70, 587, 580] }, \"\\u00B4\" => { wx: 600, boundingbox: [248, 165, 461, 351] }, \"\\u00B6\" => { wx: 600, boundingbox: [61, -70, 700, 580] }, \"\\u00B7\" => { wx: 600, boundingbox: [196, 132, 523, 430] }, \"\\u00B8\" => { wx: 600, boundingbox: [144, -142, 458, 143] }, \"\\u00B9\" => { wx: 600, boundingbox: [34, -142, 560, 143] }, \"\\u00BA\" => { wx: 600, boundingbox: [119, 277, 645, 562] }, \"\\u00BB\" => { wx: 600, boundingbox: [71, 70, 647, 446] }, \"\\u00BC\" => { wx: 600, boundingbox: [35, -15, 587, 116] }, \"\\u00BD\" => { wx: 600, boundingbox: [-45, -15, 743, 616] }, \"\\u00BF\" => { wx: 600, boundingbox: [100, -146, 509, 449] }, \"\\u00C1\" => { wx: 600, boundingbox: [272, 508, 503, 661] }, \"\\u00C2\" => { wx: 600, boundingbox: [312, 508, 609, 661] }, \"\\u00C3\" => { wx: 600, boundingbox: [212, 483, 607, 657] }, \"\\u00C4\" => { wx: 600, boundingbox: [199, 493, 643, 636] }, \"\\u00C5\" => { wx: 600, boundingbox: [195, 505, 
637, 585] }, \"\\u00C6\" => { wx: 600, boundingbox: [217, 468, 652, 631] }, \"\\u00C7\" => { wx: 600, boundingbox: [348, 498, 493, 638] }, \"\\u00C8\" => { wx: 600, boundingbox: [246, 498, 595, 638] }, \"\\u00CA\" => { wx: 600, boundingbox: [319, 481, 528, 678] }, \"\\u00CB\" => { wx: 600, boundingbox: [168, -206, 368, 0] }, \"\\u00CD\" => { wx: 600, boundingbox: [171, 488, 729, 661] }, \"\\u00CE\" => { wx: 600, boundingbox: [143, -199, 367, 0] }, \"\\u00CF\" => { wx: 600, boundingbox: [238, 493, 633, 667] }, \"\\u00D0\" => { wx: 600, boundingbox: [33, 203, 677, 313] }, \"\\u00E1\" => { wx: 600, boundingbox: [-29, 0, 708, 562] }, \"\\u00E3\" => { wx: 600, boundingbox: [188, 196, 526, 580] }, \"\\u00E8\" => { wx: 600, boundingbox: [39, 0, 636, 562] }, \"\\u00E9\" => { wx: 600, boundingbox: [48, -22, 673, 584] }, \"\\u00EA\" => { wx: 600, boundingbox: [26, 0, 701, 562] }, \"\\u00EB\" => { wx: 600, boundingbox: [188, 196, 543, 580] }, \"\\u00F1\" => { wx: 600, boundingbox: [21, -15, 652, 454] }, \"\\u00F5\" => { wx: 600, boundingbox: [77, 0, 546, 439] }, \"\\u00F8\" => { wx: 600, boundingbox: [77, 0, 587, 626] }, \"\\u00F9\" => { wx: 600, boundingbox: [54, -24, 638, 463] }, \"\\u00FA\" => { wx: 600, boundingbox: [18, -15, 662, 454] }, \"\\u00FB\" => { wx: 600, boundingbox: [22, -15, 629, 626] }, \"\\xFF\" => { wx: 600, boundingbox: [0, 0, 0, 0] } }\n symbol_metrics = { ' ' => { wx: 250, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 333, boundingbox: [128, -17, 240, 672] }, '\"' => { wx: 713, boundingbox: [31, 0, 681, 705] }, '#' => { wx: 500, boundingbox: [20, -16, 481, 673] }, '$' => { wx: 549, boundingbox: [25, 0, 478, 707] }, '%' => { wx: 833, boundingbox: [63, -36, 771, 655] }, '&' => { wx: 778, boundingbox: [41, -18, 750, 661] }, \"'\" => { wx: 439, boundingbox: [48, -17, 414, 500] }, '(' => { wx: 333, boundingbox: [53, -191, 300, 673] }, ')' => { wx: 333, boundingbox: [30, -191, 277, 673] }, '*' => { wx: 500, boundingbox: [65, 134, 427, 551] }, '+' => { wx: 549, boundingbox: [10, 0, 539, 533] }, ',' => { wx: 250, boundingbox: [56, -152, 194, 104] }, '-' => { wx: 549, boundingbox: [11, 233, 535, 288] }, '.' => { wx: 250, boundingbox: [69, -17, 181, 95] }, '/' => { wx: 278, boundingbox: [0, -18, 254, 646] }, '0' => { wx: 500, boundingbox: [24, -14, 476, 685] }, '1' => { wx: 500, boundingbox: [117, 0, 390, 673] }, '2' => { wx: 500, boundingbox: [25, 0, 475, 685] }, '3' => { wx: 500, boundingbox: [43, -14, 435, 685] }, '4' => { wx: 500, boundingbox: [15, 0, 469, 685] }, '5' => { wx: 500, boundingbox: [32, -14, 445, 690] }, '6' => { wx: 500, boundingbox: [34, -14, 468, 685] }, '7' => { wx: 500, boundingbox: [24, -16, 448, 673] }, '8' => { wx: 500, boundingbox: [56, -14, 445, 685] }, '9' => { wx: 500, boundingbox: [30, -18, 459, 685] }, ':' => { wx: 278, boundingbox: [81, -17, 193, 460] }, ';' => { wx: 278, boundingbox: [83, -152, 221, 460] }, '<' => { wx: 549, boundingbox: [26, 0, 523, 522] }, '=' => { wx: 549, boundingbox: [11, 141, 537, 390] }, '>' => { wx: 549, boundingbox: [26, 0, 523, 522] }, '?' 
=> { wx: 444, boundingbox: [70, -17, 412, 686] }, '@' => { wx: 549, boundingbox: [11, 0, 537, 475] }, 'A' => { wx: 722, boundingbox: [4, 0, 684, 673] }, 'B' => { wx: 667, boundingbox: [29, 0, 592, 673] }, 'C' => { wx: 722, boundingbox: [-9, 0, 704, 673] }, 'D' => { wx: 612, boundingbox: [6, 0, 608, 688] }, 'E' => { wx: 611, boundingbox: [32, 0, 617, 673] }, 'F' => { wx: 763, boundingbox: [26, 0, 741, 673] }, 'G' => { wx: 603, boundingbox: [24, 0, 609, 673] }, 'H' => { wx: 722, boundingbox: [39, 0, 729, 673] }, 'I' => { wx: 333, boundingbox: [32, 0, 316, 673] }, 'J' => { wx: 631, boundingbox: [18, -18, 623, 689] }, 'K' => { wx: 722, boundingbox: [35, 0, 722, 673] }, 'L' => { wx: 686, boundingbox: [6, 0, 680, 688] }, 'M' => { wx: 889, boundingbox: [28, 0, 887, 673] }, 'N' => { wx: 722, boundingbox: [29, -8, 720, 673] }, 'O' => { wx: 722, boundingbox: [41, -17, 715, 685] }, 'P' => { wx: 768, boundingbox: [25, 0, 745, 673] }, 'Q' => { wx: 741, boundingbox: [41, -17, 715, 685] }, 'R' => { wx: 556, boundingbox: [28, 0, 563, 673] }, 'S' => { wx: 592, boundingbox: [5, 0, 589, 673] }, 'T' => { wx: 611, boundingbox: [33, 0, 607, 673] }, 'U' => { wx: 690, boundingbox: [-8, 0, 694, 673] }, 'V' => { wx: 439, boundingbox: [40, -233, 436, 500] }, 'W' => { wx: 768, boundingbox: [34, 0, 736, 688] }, 'X' => { wx: 645, boundingbox: [40, 0, 599, 673] }, 'Y' => { wx: 795, boundingbox: [15, 0, 781, 684] }, 'Z' => { wx: 611, boundingbox: [44, 0, 636, 673] }, '[' => { wx: 333, boundingbox: [86, -155, 299, 674] }, '\\\\' => { wx: 863, boundingbox: [163, 0, 701, 487] }, ']' => { wx: 333, boundingbox: [33, -155, 246, 674] }, '^' => { wx: 658, boundingbox: [15, 0, 652, 674] }, '_' => { wx: 500, boundingbox: [-2, -125, 502, -75] }, '`' => { wx: 500, boundingbox: [480, 881, 1090, 917] }, 'a' => { wx: 631, boundingbox: [41, -18, 622, 500] }, 'b' => { wx: 549, boundingbox: [61, -223, 515, 741] }, 'c' => { wx: 549, boundingbox: [12, -231, 522, 499] }, 'd' => { wx: 494, boundingbox: [40, -19, 481, 740] }, 'e' => { wx: 439, boundingbox: [22, -19, 427, 502] }, 'f' => { wx: 521, boundingbox: [28, -224, 492, 673] }, 'g' => { wx: 411, boundingbox: [5, -225, 484, 499] }, 'h' => { wx: 603, boundingbox: [0, -202, 527, 514] }, 'i' => { wx: 329, boundingbox: [0, -17, 301, 503] }, 'j' => { wx: 603, boundingbox: [36, -224, 587, 499] }, 'k' => { wx: 549, boundingbox: [33, 0, 558, 501] }, 'l' => { wx: 549, boundingbox: [24, -17, 548, 739] }, 'm' => { wx: 576, boundingbox: [33, -223, 567, 500] }, 'n' => { wx: 521, boundingbox: [-9, -16, 475, 507] }, 'o' => { wx: 549, boundingbox: [35, -19, 501, 499] }, 'p' => { wx: 549, boundingbox: [10, -19, 530, 487] }, 'q' => { wx: 521, boundingbox: [43, -17, 485, 690] }, 'r' => { wx: 549, boundingbox: [50, -230, 490, 499] }, 's' => { wx: 603, boundingbox: [30, -21, 588, 500] }, 't' => { wx: 439, boundingbox: [10, -19, 418, 500] }, 'u' => { wx: 576, boundingbox: [7, -18, 535, 507] }, 'v' => { wx: 713, boundingbox: [12, -18, 671, 583] }, 'w' => { wx: 686, boundingbox: [42, -17, 684, 500] }, 'x' => { wx: 493, boundingbox: [27, -224, 469, 766] }, 'y' => { wx: 686, boundingbox: [12, -228, 701, 500] }, 'z' => { wx: 494, boundingbox: [60, -225, 467, 756] }, '{' => { wx: 480, boundingbox: [58, -183, 397, 673] }, '|' => { wx: 200, boundingbox: [65, -293, 135, 707] }, '}' => { wx: 480, boundingbox: [79, -183, 418, 673] }, '~' => { wx: 549, boundingbox: [17, 203, 529, 307] }, \"\\u00A0\" => { wx: 750, boundingbox: [20, -12, 714, 685] }, \"\\u00A1\" => { wx: 620, boundingbox: [-2, 0, 610, 685] }, \"\\u00A2\" => 
{ wx: 247, boundingbox: [27, 459, 228, 735] }, \"\\u00A3\" => { wx: 549, boundingbox: [29, 0, 526, 639] }, \"\\u00A4\" => { wx: 167, boundingbox: [-180, -12, 340, 677] }, \"\\u00A5\" => { wx: 713, boundingbox: [26, 124, 688, 404] }, \"\\u00A6\" => { wx: 500, boundingbox: [2, -193, 494, 686] }, \"\\u00A7\" => { wx: 753, boundingbox: [86, -26, 660, 533] }, \"\\u00A8\" => { wx: 753, boundingbox: [142, -36, 600, 550] }, \"\\u00A9\" => { wx: 753, boundingbox: [117, -33, 631, 532] }, \"\\u00AA\" => { wx: 753, boundingbox: [113, -36, 629, 548] }, \"\\u00AB\" => { wx: 1042, boundingbox: [24, -15, 1024, 511] }, \"\\u00AC\" => { wx: 987, boundingbox: [32, -15, 942, 511] }, \"\\u00AD\" => { wx: 603, boundingbox: [45, 0, 571, 910] }, \"\\u00AE\" => { wx: 987, boundingbox: [49, -15, 959, 511] }, \"\\u00AF\" => { wx: 603, boundingbox: [45, -22, 571, 888] }, \"\\u00B0\" => { wx: 400, boundingbox: [50, 385, 350, 685] }, \"\\u00B1\" => { wx: 549, boundingbox: [10, 0, 539, 645] }, \"\\u00B2\" => { wx: 411, boundingbox: [20, 459, 413, 737] }, \"\\u00B3\" => { wx: 549, boundingbox: [29, 0, 526, 639] }, \"\\u00B4\" => { wx: 549, boundingbox: [17, 8, 533, 524] }, \"\\u00B5\" => { wx: 713, boundingbox: [27, 123, 639, 404] }, \"\\u00B6\" => { wx: 494, boundingbox: [26, -20, 462, 746] }, \"\\u00B7\" => { wx: 460, boundingbox: [50, 113, 410, 473] }, \"\\u00B8\" => { wx: 549, boundingbox: [10, 71, 536, 456] }, \"\\u00B9\" => { wx: 549, boundingbox: [15, -25, 540, 549] }, \"\\u00BA\" => { wx: 549, boundingbox: [14, 82, 538, 443] }, \"\\u00BB\" => { wx: 549, boundingbox: [14, 135, 527, 394] }, \"\\u00BC\" => { wx: 1000, boundingbox: [111, -17, 889, 95] }, \"\\u00BD\" => { wx: 603, boundingbox: [280, -120, 336, 1010] }, \"\\u00BE\" => { wx: 1000, boundingbox: [-60, 220, 1050, 276] }, \"\\u00BF\" => { wx: 658, boundingbox: [15, -16, 602, 629] }, \"\\u00C0\" => { wx: 823, boundingbox: [175, -18, 661, 658] }, \"\\u00C1\" => { wx: 686, boundingbox: [10, -53, 578, 740] }, \"\\u00C2\" => { wx: 795, boundingbox: [26, -15, 759, 734] }, \"\\u00C3\" => { wx: 987, boundingbox: [159, -211, 870, 573] }, \"\\u00C4\" => { wx: 768, boundingbox: [43, -17, 733, 673] }, \"\\u00C5\" => { wx: 768, boundingbox: [43, -15, 733, 675] }, \"\\u00C6\" => { wx: 823, boundingbox: [39, -24, 781, 719] }, \"\\u00C7\" => { wx: 768, boundingbox: [40, 0, 732, 509] }, \"\\u00C8\" => { wx: 768, boundingbox: [40, -17, 732, 492] }, \"\\u00C9\" => { wx: 713, boundingbox: [20, 0, 673, 470] }, \"\\u00CA\" => { wx: 713, boundingbox: [20, -125, 673, 470] }, \"\\u00CB\" => { wx: 713, boundingbox: [36, -70, 690, 540] }, \"\\u00CC\" => { wx: 713, boundingbox: [37, 0, 690, 470] }, \"\\u00CD\" => { wx: 713, boundingbox: [37, -125, 690, 470] }, \"\\u00CE\" => { wx: 713, boundingbox: [45, 0, 505, 468] }, \"\\u00CF\" => { wx: 713, boundingbox: [45, -58, 505, 555] }, \"\\u00D0\" => { wx: 768, boundingbox: [26, 0, 738, 673] }, \"\\u00D1\" => { wx: 713, boundingbox: [36, -19, 681, 718] }, \"\\u00D2\" => { wx: 790, boundingbox: [50, -17, 740, 673] }, \"\\u00D3\" => { wx: 790, boundingbox: [51, -15, 741, 675] }, \"\\u00D4\" => { wx: 890, boundingbox: [18, 293, 855, 673] }, \"\\u00D5\" => { wx: 823, boundingbox: [25, -101, 803, 751] }, \"\\u00D6\" => { wx: 549, boundingbox: [10, -38, 515, 917] }, \"\\u00D7\" => { wx: 250, boundingbox: [69, 210, 169, 310] }, \"\\u00D8\" => { wx: 713, boundingbox: [15, 0, 680, 288] }, \"\\u00D9\" => { wx: 603, boundingbox: [23, 0, 583, 454] }, \"\\u00DA\" => { wx: 603, boundingbox: [30, 0, 578, 477] }, \"\\u00DB\" => { wx: 1042, boundingbox: 
[27, -20, 1023, 510] }, \"\\u00DC\" => { wx: 987, boundingbox: [30, -15, 939, 513] }, \"\\u00DD\" => { wx: 603, boundingbox: [39, 2, 567, 911] }, \"\\u00DE\" => { wx: 987, boundingbox: [45, -20, 954, 508] }, \"\\u00DF\" => { wx: 603, boundingbox: [44, -19, 572, 890] }, \"\\u00E0\" => { wx: 494, boundingbox: [18, 0, 466, 745] }, \"\\u00E1\" => { wx: 329, boundingbox: [25, -198, 306, 746] }, \"\\u00E2\" => { wx: 790, boundingbox: [50, -20, 740, 670] }, \"\\u00E3\" => { wx: 790, boundingbox: [49, -15, 739, 675] }, \"\\u00E4\" => { wx: 786, boundingbox: [5, 293, 725, 673] }, \"\\u00E5\" => { wx: 713, boundingbox: [14, -108, 695, 752] }, \"\\u00E6\" => { wx: 384, boundingbox: [24, -293, 436, 926] }, \"\\u00E7\" => { wx: 384, boundingbox: [24, -85, 108, 925] }, \"\\u00E8\" => { wx: 384, boundingbox: [24, -293, 436, 926] }, \"\\u00E9\" => { wx: 384, boundingbox: [0, -80, 349, 926] }, \"\\u00EA\" => { wx: 384, boundingbox: [0, -79, 77, 925] }, \"\\u00EB\" => { wx: 384, boundingbox: [0, -80, 349, 926] }, \"\\u00EC\" => { wx: 494, boundingbox: [209, -85, 445, 925] }, \"\\u00ED\" => { wx: 494, boundingbox: [20, -85, 284, 935] }, \"\\u00EE\" => { wx: 494, boundingbox: [209, -75, 445, 935] }, \"\\u00EF\" => { wx: 494, boundingbox: [209, -85, 284, 935] }, \"\\u00F1\" => { wx: 329, boundingbox: [21, -198, 302, 746] }, \"\\u00F2\" => { wx: 274, boundingbox: [2, -107, 291, 916] }, \"\\u00F3\" => { wx: 686, boundingbox: [308, -88, 675, 920] }, \"\\u00F4\" => { wx: 686, boundingbox: [308, -88, 378, 975] }, \"\\u00F5\" => { wx: 686, boundingbox: [11, -87, 378, 921] }, \"\\u00F6\" => { wx: 384, boundingbox: [54, -293, 466, 926] }, \"\\u00F7\" => { wx: 384, boundingbox: [382, -85, 466, 925] }, \"\\u00F8\" => { wx: 384, boundingbox: [54, -293, 466, 926] }, \"\\u00F9\" => { wx: 384, boundingbox: [22, -80, 371, 926] }, \"\\u00FA\" => { wx: 384, boundingbox: [294, -79, 371, 925] }, \"\\u00FB\" => { wx: 384, boundingbox: [22, -80, 371, 926] }, \"\\u00FC\" => { wx: 494, boundingbox: [48, -85, 284, 925] }, \"\\u00FD\" => { wx: 494, boundingbox: [209, -85, 473, 935] }, \"\\u00FE\" => { wx: 494, boundingbox: [48, -75, 284, 935] }, \"\\xFF\" => { wx: 790, boundingbox: [56, -3, 733, 808] } }\n zapfdingbats_metrics = { ' ' => { wx: 278, boundingbox: [0, 0, 0, 0] }, '!' => { wx: 974, boundingbox: [35, 72, 939, 621] }, '\"' => { wx: 961, boundingbox: [35, 81, 927, 611] }, '#' => { wx: 974, boundingbox: [35, 72, 939, 621] }, '$' => { wx: 980, boundingbox: [35, 0, 945, 692] }, '%' => { wx: 719, boundingbox: [34, 139, 685, 566] }, '&' => { wx: 789, boundingbox: [35, -14, 755, 705] }, \"'\" => { wx: 790, boundingbox: [35, -14, 755, 705] }, '(' => { wx: 791, boundingbox: [35, -13, 761, 705] }, ')' => { wx: 690, boundingbox: [34, 138, 655, 553] }, '*' => { wx: 960, boundingbox: [35, 123, 925, 568] }, '+' => { wx: 939, boundingbox: [35, 134, 904, 559] }, ',' => { wx: 549, boundingbox: [29, -11, 516, 705] }, '-' => { wx: 855, boundingbox: [34, 59, 820, 632] }, '.' 
=> { wx: 911, boundingbox: [35, 50, 876, 642] }, '/' => { wx: 933, boundingbox: [35, 139, 899, 550] }, '0' => { wx: 911, boundingbox: [35, 50, 876, 642] }, '1' => { wx: 945, boundingbox: [35, 139, 909, 553] }, '2' => { wx: 974, boundingbox: [35, 104, 938, 587] }, '3' => { wx: 755, boundingbox: [34, -13, 721, 705] }, '4' => { wx: 846, boundingbox: [36, -14, 811, 705] }, '5' => { wx: 762, boundingbox: [35, 0, 727, 692] }, '6' => { wx: 761, boundingbox: [35, 0, 727, 692] }, '7' => { wx: 571, boundingbox: [-1, -68, 571, 661] }, '8' => { wx: 677, boundingbox: [36, -13, 642, 705] }, '9' => { wx: 763, boundingbox: [35, 0, 728, 692] }, ':' => { wx: 760, boundingbox: [35, 0, 726, 692] }, ';' => { wx: 759, boundingbox: [35, 0, 725, 692] }, '<' => { wx: 754, boundingbox: [35, 0, 720, 692] }, '=' => { wx: 494, boundingbox: [35, 0, 460, 692] }, '>' => { wx: 552, boundingbox: [35, 0, 517, 692] }, '?' => { wx: 537, boundingbox: [35, 0, 503, 692] }, '@' => { wx: 577, boundingbox: [35, 96, 542, 596] }, 'A' => { wx: 692, boundingbox: [35, -14, 657, 705] }, 'B' => { wx: 786, boundingbox: [35, -14, 751, 705] }, 'C' => { wx: 788, boundingbox: [35, -14, 752, 705] }, 'D' => { wx: 788, boundingbox: [35, -14, 753, 705] }, 'E' => { wx: 790, boundingbox: [35, -14, 756, 705] }, 'F' => { wx: 793, boundingbox: [35, -13, 759, 705] }, 'G' => { wx: 794, boundingbox: [35, -13, 759, 705] }, 'H' => { wx: 816, boundingbox: [35, -14, 782, 705] }, 'I' => { wx: 823, boundingbox: [35, -14, 787, 705] }, 'J' => { wx: 789, boundingbox: [35, -14, 754, 705] }, 'K' => { wx: 841, boundingbox: [35, -14, 807, 705] }, 'L' => { wx: 823, boundingbox: [35, -14, 789, 705] }, 'M' => { wx: 833, boundingbox: [35, -14, 798, 705] }, 'N' => { wx: 816, boundingbox: [35, -13, 782, 705] }, 'O' => { wx: 831, boundingbox: [35, -14, 796, 705] }, 'P' => { wx: 923, boundingbox: [35, -14, 888, 705] }, 'Q' => { wx: 744, boundingbox: [35, 0, 710, 692] }, 'R' => { wx: 723, boundingbox: [35, 0, 688, 692] }, 'S' => { wx: 749, boundingbox: [35, 0, 714, 692] }, 'T' => { wx: 790, boundingbox: [34, -14, 756, 705] }, 'U' => { wx: 792, boundingbox: [35, -14, 758, 705] }, 'V' => { wx: 695, boundingbox: [35, -14, 661, 706] }, 'W' => { wx: 776, boundingbox: [35, -6, 741, 699] }, 'X' => { wx: 768, boundingbox: [35, -7, 734, 699] }, 'Y' => { wx: 792, boundingbox: [35, -14, 757, 705] }, 'Z' => { wx: 759, boundingbox: [35, 0, 725, 692] }, '[' => { wx: 707, boundingbox: [35, -13, 672, 704] }, '\\\\' => { wx: 708, boundingbox: [35, -14, 672, 705] }, ']' => { wx: 682, boundingbox: [35, -14, 647, 705] }, '^' => { wx: 701, boundingbox: [35, -14, 666, 705] }, '_' => { wx: 826, boundingbox: [35, -14, 791, 705] }, '`' => { wx: 815, boundingbox: [35, -14, 780, 705] }, 'a' => { wx: 789, boundingbox: [35, -14, 754, 705] }, 'b' => { wx: 789, boundingbox: [35, -14, 754, 705] }, 'c' => { wx: 707, boundingbox: [34, -14, 673, 705] }, 'd' => { wx: 687, boundingbox: [36, 0, 651, 692] }, 'e' => { wx: 696, boundingbox: [35, 0, 661, 691] }, 'f' => { wx: 689, boundingbox: [35, 0, 655, 692] }, 'g' => { wx: 786, boundingbox: [34, -14, 751, 705] }, 'h' => { wx: 787, boundingbox: [35, -14, 752, 705] }, 'i' => { wx: 713, boundingbox: [35, -14, 678, 705] }, 'j' => { wx: 791, boundingbox: [35, -14, 756, 705] }, 'k' => { wx: 785, boundingbox: [36, -14, 751, 705] }, 'l' => { wx: 791, boundingbox: [35, -14, 757, 705] }, 'm' => { wx: 873, boundingbox: [35, -14, 838, 705] }, 'n' => { wx: 761, boundingbox: [35, 0, 726, 692] }, 'o' => { wx: 762, boundingbox: [35, 0, 727, 692] }, 'p' => { wx: 762, boundingbox: 
[35, 0, 727, 692] }, 'q' => { wx: 759, boundingbox: [35, 0, 725, 692] }, 'r' => { wx: 759, boundingbox: [35, 0, 725, 692] }, 's' => { wx: 892, boundingbox: [35, 0, 858, 705] }, 't' => { wx: 892, boundingbox: [35, -14, 858, 692] }, 'u' => { wx: 788, boundingbox: [35, -14, 754, 705] }, 'v' => { wx: 784, boundingbox: [35, -14, 749, 705] }, 'w' => { wx: 438, boundingbox: [35, -14, 403, 705] }, 'x' => { wx: 138, boundingbox: [35, 0, 104, 692] }, 'y' => { wx: 277, boundingbox: [35, 0, 242, 692] }, 'z' => { wx: 415, boundingbox: [35, 0, 380, 692] }, '{' => { wx: 392, boundingbox: [35, 263, 357, 705] }, '|' => { wx: 392, boundingbox: [34, 263, 357, 705] }, '}' => { wx: 668, boundingbox: [35, 263, 633, 705] }, '~' => { wx: 668, boundingbox: [36, 263, 634, 705] }, \"\\u0080\" => { wx: 390, boundingbox: [35, -14, 356, 705] }, \"\\u0081\" => { wx: 390, boundingbox: [35, -14, 355, 705] }, \"\\u0082\" => { wx: 317, boundingbox: [35, 0, 283, 692] }, \"\\u0083\" => { wx: 317, boundingbox: [35, 0, 283, 692] }, \"\\u0084\" => { wx: 276, boundingbox: [35, 0, 242, 692] }, \"\\u0085\" => { wx: 276, boundingbox: [35, 0, 242, 692] }, \"\\u0086\" => { wx: 509, boundingbox: [35, 0, 475, 692] }, \"\\u0087\" => { wx: 509, boundingbox: [35, 0, 475, 692] }, \"\\u0088\" => { wx: 410, boundingbox: [35, 0, 375, 692] }, \"\\u0089\" => { wx: 410, boundingbox: [35, 0, 375, 692] }, \"\\u008A\" => { wx: 234, boundingbox: [35, -14, 199, 705] }, \"\\u008B\" => { wx: 234, boundingbox: [35, -14, 199, 705] }, \"\\u008C\" => { wx: 334, boundingbox: [35, 0, 299, 692] }, \"\\u008D\" => { wx: 334, boundingbox: [35, 0, 299, 692] }, \"\\u00A1\" => { wx: 732, boundingbox: [35, -143, 697, 806] }, \"\\u00A2\" => { wx: 544, boundingbox: [56, -14, 488, 706] }, \"\\u00A3\" => { wx: 544, boundingbox: [34, -14, 508, 705] }, \"\\u00A4\" => { wx: 910, boundingbox: [35, 40, 875, 651] }, \"\\u00A5\" => { wx: 667, boundingbox: [35, -14, 633, 705] }, \"\\u00A6\" => { wx: 760, boundingbox: [35, -14, 726, 705] }, \"\\u00A7\" => { wx: 760, boundingbox: [0, 121, 758, 569] }, \"\\u00A8\" => { wx: 776, boundingbox: [35, 0, 741, 705] }, \"\\u00A9\" => { wx: 595, boundingbox: [34, -14, 560, 705] }, \"\\u00AA\" => { wx: 694, boundingbox: [35, -14, 659, 705] }, \"\\u00AB\" => { wx: 626, boundingbox: [34, 0, 591, 705] }, \"\\u00AC\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00AD\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00AE\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00AF\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B0\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B1\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B2\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B3\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B4\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B5\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B6\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B7\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B8\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00B9\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BA\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BB\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BC\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BD\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BE\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00BF\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C0\" => { wx: 
788, boundingbox: [35, -14, 754, 705] }, \"\\u00C1\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C2\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C3\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C4\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C5\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C6\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C7\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C8\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00C9\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CA\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CB\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CC\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CD\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CE\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00CF\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00D0\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00D1\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00D2\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00D3\" => { wx: 788, boundingbox: [35, -14, 754, 705] }, \"\\u00D4\" => { wx: 894, boundingbox: [35, 58, 860, 634] }, \"\\u00D5\" => { wx: 838, boundingbox: [35, 152, 803, 540] }, \"\\u00D6\" => { wx: 1016, boundingbox: [34, 152, 981, 540] }, \"\\u00D7\" => { wx: 458, boundingbox: [35, -127, 422, 820] }, \"\\u00D8\" => { wx: 748, boundingbox: [35, 94, 698, 597] }, \"\\u00D9\" => { wx: 924, boundingbox: [35, 140, 890, 552] }, \"\\u00DA\" => { wx: 748, boundingbox: [35, 94, 698, 597] }, \"\\u00DB\" => { wx: 918, boundingbox: [35, 166, 884, 526] }, \"\\u00DC\" => { wx: 927, boundingbox: [35, 32, 892, 660] }, \"\\u00DD\" => { wx: 928, boundingbox: [35, 129, 891, 562] }, \"\\u00DE\" => { wx: 928, boundingbox: [35, 128, 893, 563] }, \"\\u00DF\" => { wx: 834, boundingbox: [35, 155, 799, 537] }, \"\\u00E0\" => { wx: 873, boundingbox: [35, 93, 838, 599] }, \"\\u00E1\" => { wx: 828, boundingbox: [35, 104, 791, 588] }, \"\\u00E2\" => { wx: 924, boundingbox: [35, 98, 889, 594] }, \"\\u00E3\" => { wx: 924, boundingbox: [35, 98, 889, 594] }, \"\\u00E4\" => { wx: 917, boundingbox: [35, 0, 882, 692] }, \"\\u00E5\" => { wx: 930, boundingbox: [35, 84, 896, 608] }, \"\\u00E6\" => { wx: 931, boundingbox: [35, 84, 896, 608] }, \"\\u00E7\" => { wx: 463, boundingbox: [35, -99, 429, 791] }, \"\\u00E8\" => { wx: 883, boundingbox: [35, 71, 848, 623] }, \"\\u00E9\" => { wx: 836, boundingbox: [35, 44, 802, 648] }, \"\\u00EA\" => { wx: 836, boundingbox: [35, 44, 802, 648] }, \"\\u00EB\" => { wx: 867, boundingbox: [35, 101, 832, 591] }, \"\\u00EC\" => { wx: 867, boundingbox: [35, 101, 832, 591] }, \"\\u00ED\" => { wx: 696, boundingbox: [35, 44, 661, 648] }, \"\\u00EE\" => { wx: 696, boundingbox: [35, 44, 661, 648] }, \"\\u00EF\" => { wx: 874, boundingbox: [35, 77, 840, 619] }, \"\\u00F1\" => { wx: 874, boundingbox: [35, 73, 840, 615] }, \"\\u00F2\" => { wx: 760, boundingbox: [35, 0, 725, 692] }, \"\\u00F3\" => { wx: 946, boundingbox: [35, 160, 911, 533] }, \"\\u00F4\" => { wx: 771, boundingbox: [34, 37, 736, 655] }, \"\\u00F5\" => { wx: 865, boundingbox: [35, 207, 830, 481] }, \"\\u00F6\" => { wx: 771, boundingbox: [34, 37, 736, 655] }, \"\\u00F7\" => { wx: 888, boundingbox: [34, -19, 853, 712] }, \"\\u00F8\" => { wx: 967, boundingbox: [35, 124, 932, 568] }, \"\\u00F9\" => { wx: 888, boundingbox: [34, -19, 853, 712] }, \"\\u00FA\" => { wx: 831, boundingbox: [35, 113, 796, 579] }, 
\"\\u00FB\" => { wx: 873, boundingbox: [36, 118, 838, 578] }, \"\\u00FC\" => { wx: 927, boundingbox: [35, 150, 891, 542] }, \"\\u00FD\" => { wx: 970, boundingbox: [35, 76, 931, 616] }, \"\\u00FE\" => { wx: 918, boundingbox: [34, 99, 884, 593] } }\n # make two correlating arrays (indexes reffer to the same data), one for font names and the other for the fonts matrics.\n fonts_metrics_array = [times_metrics, times_bold_metrics, times_italic_metrics, times_bolditalic_metrics,\n helvetica_metrics, helvetica_bold_metrics, helvetica_oblique_metrics, helvetica_oblique_metrics,\n courier_metrics, courier_bold_metrics, courier_oblique_metrics, courier_boldoblique_metrics,\n symbol_metrics, zapfdingbats_metrics]\n fonts_names_array = [:\"Times-Roman\",\n :\"Times-Bold\",\n :\"Times-Italic\",\n :\"Times-BoldItalic\",\n :Helvetica,\n :\"Helvetica-Bold\",\n :\"Helvetica-BoldOblique\",\n :\"Helvetica-Oblique\",\n :Courier,\n :\"Courier-Bold\",\n :\"Courier-Oblique\",\n :\"Courier-BoldOblique\",\n :Symbol,\n :ZapfDingbats]\n\n # create the font object and register the font for each one of the 14 fonts\n fonts_names_array.each_index do |i|\n CombinePDF::Fonts.register_font fonts_names_array[i], fonts_metrics_array[i], Type: :Font, Subtype: :Type1, BaseFont: fonts_names_array[i]\n end\n end",
"def parse_fnt_file(filename)\n fontFile = File.open(filename)\n \n fontFile.each_line { |line|\n \n entries = line.split(\" \")\n \n case entries[0]\n # Info about the font\n when \"info\"\n for i in 1...entries.length\n key_value = entries[i].split(\"=\")\n case key_value[0]\n when \"size\", \"stretchH\", \"spacing\", \"outline\"\n @info[key_value[0].to_sym] = key_value[1].to_i\n when \"bold\", \"italic\", \"unicode\", \"smooth\", \"aa\"\n @info[key_value[0].to_sym] = (key_value[1].to_i == 1)\n end\n end\n \n # Info common to all characters\n when \"common\"\n for i in 1...entries.length\n key_value = entries[i].split(\"=\")\n case key_value[0]\n when \"lineHeight\", \"base\", \"scaleW\", \"scaleH\", \"pages\"\n @common[key_value[0].to_sym] = key_value[1].to_i\n when \"packed\"\n @common[key_value[0].to_sym] = (key_value[1].to_i == 1)\n end\n end\n \n # Info about the bitmap pages that contain the characters\n when \"page\"\n page = Page.new\n \n for i in 1...entries.length\n key_value = entries[i].split(\"=\")\n case key_value[0]\n when \"id\"\n page[key_value[0].to_sym] = key_value[1].to_i\n when \"file\"\n page[key_value[0].to_sym] = key_value[1].tr(\"\\\"\", \"\")\n end\n end\n \n @pages[page.id] = page\n \n # Info about individual characters\n when \"char\"\n char = Char.new\n \n for i in 1...entries.length\n key_value = entries[i].split(\"=\")\n case key_value[0]\n when \"id\", \"x\", \"y\", \"width\", \"height\", \"xoffset\", \"yoffset\", \"xadvance\", \"page\"\n char[key_value[0].to_sym] = key_value[1].to_i\n end\n end\n \n @chars[char.id] = char\n end\n }\n end",
"def update\n @project = Project.find(params[:project_id])\n @font_set = @project.font_sets.find(params[:id])\n @font_families = get_font_family_array(@project)\n respond_to do |format|\n if @font_set.update(font_set_params)\n format.html { redirect_to project_details_url(@project), notice: 'Font set was successfully updated.' }\n format.json { render :show, status: :ok, location: @font_set }\n else\n format.html { render :edit }\n format.json { render json: @font_set.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set(asset,new_value,slide_name_or_index)\n\t\t\t# TODO: how to support non-ttf fonts?\n\t\t\tsuper( asset, new_value.sub(%r{^\\.[/\\\\]},'').sub(%r{^fonts[/\\\\]},'').sub(%r{\\.ttf$},''), slide_name_or_index )\n\t\tend",
"def change_column_font(col, change_type, arg, font, xf_id)\n validate_workbook\n validate_nonnegative(col)\n increase_columns(col)\n\n i = get_cols_index(col)\n\n # Modify font array and retrieve new font id\n font_id = modify_font(@workbook, font, xf_id[:fontId].to_s)\n # Get copy of xf object with modified font id\n xf = deep_copy(xf_id)\n xf[:fontId] = Integer(font_id)\n # Modify xf array and retrieve new xf id\n modify_xf(@workbook, xf)\n\n change_cols(i, col)\n\n @sheet_data.each_with_index do |row, i|\n c = row[col]\n unless c.nil?\n font_switch(c, change_type, arg)\n end\n end\n end",
"def parse_font_options(options = {})\n return if (options.keys & [:fg_color, :sz, :b, :i, :u, :strike, :outline, :shadow, :charset, :family, :font_name]).empty?\n\n Axlsx.instance_values_for(fonts.first).each do |key, value|\n # Thanks for that 1.8.7 - cant do a simple merge...\n options[key.to_sym] = value unless options.key?(key.to_sym)\n end\n font = Font.new(options)\n font.color = Color.new(rgb: options[:fg_color]) if options[:fg_color]\n font.name = options[:font_name] if options[:font_name]\n options[:type] == :dxf ? font : fonts << font\n end",
"def fonts(workbook, _center, heading, _colors)\n worksheet = workbook.add_worksheet('Fonts')\n\n worksheet.set_column(0, 0, 30)\n worksheet.set_column(1, 1, 10)\n\n worksheet.write(0, 0, \"Font name\", heading)\n worksheet.write(0, 1, \"Font size\", heading)\n\n fonts = []\n fonts << [10, 'Arial']\n fonts << [12, 'Arial']\n fonts << [14, 'Arial']\n fonts << [12, 'Arial Black']\n fonts << [12, 'Arial Narrow']\n fonts << [12, 'Century Schoolbook']\n fonts << [12, 'Courier']\n fonts << [12, 'Courier New']\n fonts << [12, 'Garamond']\n fonts << [12, 'Impact']\n fonts << [12, 'Lucida Handwriting']\n fonts << [12, 'Times New Roman']\n fonts << [12, 'Symbol']\n fonts << [12, 'Wingdings']\n fonts << [12, 'A font that doesn\\'t exist']\n\n i = 0\n fonts.each do |font|\n format = workbook.add_format\n\n format.set_size(font[0])\n format.set_font(font[1])\n\n i += 1\n worksheet.write(i, 0, font[1], format)\n worksheet.write(i, 1, font[0], format)\n end\n end",
"def set_fonts\n font_families.update Elegant.configuration.fonts\n fallback_fonts ['Fallback']\n end",
"def convert_font_args(params)\n return unless params\n font = {\n :_name => params[:name],\n :_color => params[:color],\n :_size => params[:size],\n :_bold => params[:bold],\n :_italic => params[:italic],\n :_underline => params[:underline],\n :_pitch_family => params[:pitch_family],\n :_charset => params[:charset],\n :_baseline => params[:baseline] || 0\n }\n\n # Convert font size units.\n font[:_size] *= 100 if font[:_size] && font[:_size] != 0\n\n font\n end",
"def update!(**args)\n @font_id = args[:font_id] if args.key?(:font_id)\n @font_size = args[:font_size] if args.key?(:font_size)\n @median_height = args[:median_height] if args.key?(:median_height)\n @median_line_height = args[:median_line_height] if args.key?(:median_line_height)\n @median_line_space = args[:median_line_space] if args.key?(:median_line_space)\n @median_line_span = args[:median_line_span] if args.key?(:median_line_span)\n @median_width = args[:median_width] if args.key?(:median_width)\n @num_line_spaces = args[:num_line_spaces] if args.key?(:num_line_spaces)\n @num_lines = args[:num_lines] if args.key?(:num_lines)\n @num_symbols = args[:num_symbols] if args.key?(:num_symbols)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def change_font_name(new_font_name = 'Verdana')\n validate_worksheet\n\n font = get_cell_font.dup\n font.set_name(new_font_name)\n update_font_references(font)\n end",
"def convert_fonts\n out = \"\"\n sass.each_line do |line|\n line.gsub!(/(\\s*)(word-spacing|letter-spacing|font-size|line-height|margin-[^\\s]+|margin|padding-[\\s]+|padding)\\s*:(.*)/) do |m|\n # indent rule: value\n m = \"#{$1}#{$2}: #{CSS.val_to_em($3)}\"\n end\n out << line\n end\n self.sass = out\n end",
"def add_font font\n @fonts.push(font).uniq! if font\n font\n end",
"def pbSetSystemFont(bitmap)\n fontname=MessageConfig.pbGetSystemFontName\n bitmap.font.name=fontname\n if fontname==\"Pokemon FireLeaf With BLD\" || fontname==\"Power Red and Green With BLD\"\n bitmap.font.size=29\n elsif fontname==\"Pokemon Emerald Small With BLD\" || fontname==\"Power Green Small With BLD\"\n bitmap.font.size=25\n else\n bitmap.font.size=31\n end\nend",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf)\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def font_set(regex, &block)\n Ray::FontSet.add_set(regex, &block)\n end",
"def read_styles(doc)\n @numFmts = []\n @cellXfs = []\n fonts = []\n \n #TODO: doc.find(\"//*[local-name()='numFmt']\").each do |numFmt|\n doc.xpath(\"//*[local-name()='numFmt']\").each do |numFmt|\n # TODO: numFmtId = numFmt.attributes.to_h['numFmtId']\n numFmtId = numFmt.attributes['numFmtId']\n #TODO: formatCode = numFmt.attributes.to_h['formatCode']\n formatCode = numFmt.attributes['formatCode']\n @numFmts << [numFmtId, formatCode]\n end\n #TODO: doc.find(\"//*[local-name()='fonts']\").each do |fonts_el|\n doc.xpath(\"//*[local-name()='fonts']\").each do |fonts_el|\n #TODO: fonts_el.each_element do |font_el|\n fonts_el.children.each do |font_el|\n #TODO: if font_el.name == 'font'\n if font_el == 'font'\n font = Excelx::Font.new\n font_el.each_element do |font_sub_el|\n case font_sub_el.name\n when 'b'\n font.bold = true\n when 'i'\n font.italic = true\n when 'u'\n font.underline = true\n end\n end\n fonts << font\n end\n end\n end\n \n #TODO: doc.find(\"//*[local-name()='cellXfs']\").each do |xfs|\n doc.xpath(\"//*[local-name()='cellXfs']\").each do |xfs|\n xfs.children.each do |xf|\n #TODO: numFmtId = xf.attributes.to_h['numFmtId']\n numFmtId = xf['numFmtId']\n @cellXfs << [numFmtId]\n #TODO: fontId = xf.attributes.to_h['fontId'].to_i\n fontId = xf['fontId'].to_i\n @style_definitions << fonts[fontId]\n end\n end\n end",
"def SetFont(family, style='', size=0)\n\t\t# save previous values\n\t\t@prevfont_family = @font_family;\n\t\t@prevfont_style = @font_style;\n\n\t\tfamily=family.downcase;\n\t\tif (family=='')\n\t\t\tfamily=@font_family;\n\t\tend\n\t\tif ((!@is_unicode) and (family == 'arial'))\n\t\t\tfamily = 'helvetica';\n\t\telsif ((family==\"symbol\") or (family==\"zapfdingbats\"))\n\t\t\tstyle='';\n\t\tend\n\t\t\n\t\tstyle=style.upcase;\n\n\t\tif (style.include?('U'))\n\t\t\t@underline=true;\n\t\t\tstyle= style.gsub('U','');\n\t\telse\n\t\t\t@underline=false;\n\t\tend\n\t\tif (style.include?('D'))\n\t\t\t@deleted=true;\n\t\t\tstyle= style.gsub('D','');\n\t\telse\n\t\t\t@deleted=false;\n\t\tend\n\t\tif (style=='IB')\n\t\t\tstyle='BI';\n\t\tend\n\t\tif (size==0)\n\t\t\tsize=@font_size_pt;\n\t\tend\n\n\t\t# try to add font (if not already added)\n\t\tAddFont(family, style);\n\t\t\n\t\t#Test if font is already selected\n\t\tif ((@font_family == family) and (@font_style == style) and (@font_size_pt == size))\n\t\t\treturn;\n\t\tend\n\t\t\n\t\tfontkey = family + style;\n\t\tstyle = '' if (@fonts[fontkey].nil? and !@fonts[family].nil?)\n \n\t\t#Test if used for the first time\n\t\tif (@fonts[fontkey].nil?)\n\t\t\t#Check if one of the standard fonts\n\t\t\tif (!@core_fonts[fontkey].nil?)\n\t\t\t\tif @@fpdf_charwidths[fontkey].nil?\n\t\t\t\t\t#Load metric file\n\t\t\t\t\tfile = family;\n\t\t\t\t\tif ((family!='symbol') and (family!='zapfdingbats'))\n\t\t\t\t\t\tfile += style.downcase;\n\t\t\t\t\tend\n\t\t\t\t\tif (getfontpath(file + '.rb').nil?)\n\t\t\t\t\t\t# try to load the basic file without styles\n\t\t\t\t\t\tfile = family;\n\t\t\t\t\t\tfontkey = family;\n\t\t\t\t\tend\n\t\t\t\t\trequire(getfontpath(file + '.rb'));\n \t\tfont_desc = TCPDFFontDescriptor.font(file)\n\t\t\t\t\tif ((@is_unicode and ctg.nil?) or ((!@is_unicode) and (@@fpdf_charwidths[fontkey].nil?)) )\n\t\t\t\t\t\tError(\"Could not include font metric file [\" + fontkey + \"]: \" + getfontpath(file + \".rb\"));\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\ti = @fonts.length + 1;\n\n\t\t\t\tif (@is_unicode)\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type' => font_desc[:type], 'name' => font_desc[:name], 'desc' => font_desc[:desc], 'up' => font_desc[:up], 'ut' => font_desc[:ut], 'cw' => font_desc[:cw], 'enc' => font_desc[:enc], 'file' => font_desc[:file], 'ctg' => font_desc[:ctg]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\telse\n\t\t\t\t\t@fonts[fontkey] = {'i' => i, 'type'=>'core', 'name'=>@core_fonts[fontkey], 'up'=>-100, 'ut'=>50, 'cw' => font_desc[:cw]}\n\t\t\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tError('Undefined font: ' + family + ' ' + style);\n\t\t\tend\n\t\tend\n\t\t#Select it\n\t\t@font_family = family;\n\t\t@font_style = style;\n\t\t@font_size_pt = size;\n\t\t@font_size = size / @k;\n\t\t@current_font = @fonts[fontkey]; # was & may need deep copy?\n\t\tif (@page>0)\n\t\t\tout(sprintf('BT /F%d %.2f Tf ET', @current_font['i'], @font_size_pt));\n\t\tend\n\tend",
"def load_font(family, height)\n end",
"def AddFont(family, style='', file='')\n\t\tif (family.empty?)\n\t\t\treturn;\n\t\tend\n\n\t\t#Add a TrueType or Type1 font\n\t\tfamily = family.downcase\n\t\tif ((!@is_unicode) and (family == 'arial'))\n\t\t\tfamily = 'helvetica';\n\t\tend\n\n\t\tstyle=style.upcase\n\t\tstyle=style.gsub('U','');\n\t\tstyle=style.gsub('D','');\n\t\tif (style == 'IB')\n\t\t\tstyle = 'BI';\n\t\tend\n\n\t\tfontkey = family + style;\n\t\t# check if the font has been already added\n\t\tif !@fonts[fontkey].nil?\n\t\t\treturn;\n\t\tend\n\n\t\tif (file=='')\n\t\t\tfile = family.gsub(' ', '') + style.downcase + '.rb';\n\t\tend\n\t\tfont_file_name = getfontpath(file)\n\t\tif (font_file_name.nil?)\n\t\t\t# try to load the basic file without styles\n\t\t\tfile = family.gsub(' ', '') + '.rb';\n \t\tfont_file_name = getfontpath(file)\n\t\tend\n if font_file_name.nil?\n\t\t\tError(\"Could not find font #{file}.\")\n end\n\t\trequire(getfontpath(file))\n\t\tfont_desc = TCPDFFontDescriptor.font(file)\n\n\t\tif (font_desc[:name].nil? and @@fpdf_charwidths.nil?)\n\t\t\tError('Could not include font definition file');\n\t\tend\n\n\t\ti = @fonts.length+1;\n\t\tif (@is_unicode)\n\t\t\t@fonts[fontkey] = {'i' => i, 'type' => font_desc[:type], 'name' => font_desc[:name], 'desc' => font_desc[:desc], 'up' => font_desc[:up], 'ut' => font_desc[:ut], 'cw' => font_desc[:cw], 'enc' => font_desc[:enc], 'file' => font_desc[:file], 'ctg' => font_desc[:ctg], 'cMap' => font_desc[:cMap], 'registry' => font_desc[:registry]}\n\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\telse\n\t\t\t@fonts[fontkey]={'i' => i, 'type'=>'core', 'name'=>@core_fonts[fontkey], 'up'=>-100, 'ut'=>50, 'cw' => font_desc[:cw]}\n\t\t\t@@fpdf_charwidths[fontkey] = font_desc[:cw];\n\t\tend\n\n\t\tif (!font_desc[:diff].nil? and (!font_desc[:diff].empty?))\n\t\t\t#Search existing encodings\n\t\t\td=0;\n\t\t\tnb=@diffs.length;\n\t\t\t1.upto(nb) do |i|\n\t\t\t\tif (@diffs[i]== font_desc[:diff])\n\t\t\t\t\td = i;\n\t\t\t\t\tbreak;\n\t\t\t\tend\n\t\t\tend\n\t\t\tif (d==0)\n\t\t\t\td = nb+1;\n\t\t\t\t@diffs[d] = font_desc[:diff];\n\t\t\tend\n\t\t\t@fonts[fontkey]['diff'] = d;\n\t\tend\n\t\tif (font_desc[:file] and font_desc[:file].length > 0)\n\t\t\tif (font_desc[:type] == \"TrueType\") or (font_desc[:type] == \"TrueTypeUnicode\")\n\t\t\t\t@font_files[font_desc[:file]] = {'length1' => font_desc[:originalsize]}\n\t\t\telse\n\t\t\t\t@font_files[font_desc[:file]] = {'length1' => font_desc[:size1], 'length2' => font_desc[:size2]}\n\t\t\tend\n\t\tend\n\tend",
"def change_column_font(column_index, change_type, arg, font, xf)\n validate_workbook\n ensure_cell_exists(0, column_index)\n\n xf = workbook.register_new_font(font, xf)\n cols.get_range(column_index).style_index = workbook.register_new_xf(xf, get_col_style(column_index))\n\n sheet_data.rows.each { |row|\n c = row && row[column_index]\n c.font_switch(change_type, arg) unless c.nil?\n }\n end",
"def set_data_font(data_type)\n @maqj_default_font = contents.font.dup unless @maqj_default_font\n contents.font.name = QuestData::FONTNAMES[data_type] ? \n QuestData::FONTNAMES[data_type] : @maqj_default_font.name\n contents.font.size = QuestData::FONTSIZES[data_type] ? \n QuestData::FONTSIZES[data_type] : @maqj_default_font.size\n contents.font.bold = QuestData::FONTBOLDS.keys.include?(data_type) ? \n QuestData::FONTBOLDS[data_type] : @maqj_default_font.bold\n contents.font.italic = QuestData::FONTITALICS.keys.include?(data_type) ?\n QuestData::FONTITALICS[data_type] : @maqj_default_font.italic\n case data_type\n when :objectives then change_color(@maqj_objective_color) if @maqj_objective_color\n when :name then change_color(quest_name_colour(@quest)) if @quest\n else\n change_color(text_color(QuestData::COLOURS[data_type])) if QuestData::COLOURS.keys.include?(data_type)\n end\n end",
"def change_row_font_name(row=0, font_name='Verdana')\n # Get style object\n xf_id = xf_id(get_row_style(row))\n # Get copy of font object with modified name\n font = deep_copy(@workbook.fonts[xf_id[:fontId].to_s][:font])\n font[:name][:attributes][:val] = font_name.to_s\n # Update font and xf array\n change_row_font(row, Worksheet::NAME, font_name, font, xf_id)\n end",
"def load_single_font(name)\n\n # Determine path to font file.\n font_file_name = name.gsub(/\\s+/, \"\")\n path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}.ttf\")\n return unless File.file?(path)\n\n # Determine variants.\n italics_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-Italic.ttf\")\n bold_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-Bold.ttf\")\n bold_italics_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-BoldItalic.ttf\")\n\n # Build hash of variants.\n font_hash = { normal: path }\n font_hash[:italic] = italics_path if File.file?(italics_path)\n font_hash[:bold] = bold_path if File.file?(bold_path)\n font_hash[:bold_italic] = bold_italics_path if File.file?(bold_italics_path)\n\n # Add font.\n self.font_families.update(name => font_hash)\n\n end",
"def fonts\n package 'ttf-freefarsi', :ensure => :installed\n package 'ttf-farsiweb', :ensure => :installed\n end",
"def update!(**args)\n @font_color = args[:font_color] if args.key?(:font_color)\n @format_type = args[:format_type] if args.key?(:format_type)\n end",
"def set_font_from_path(font, bold_font)\n font_name = Pathname.new(font).basename\n @pdf.font_families.update(\n \"#{font_name}\" => {\n normal: font,\n italic: font,\n bold: bold_font,\n bold_italic: bold_font\n }\n )\n @pdf.font(font_name)\n end",
"def new_font(*args)\n font = Font.new\n return font_config_abs(font, *args)\n end",
"def set_font(face, size)\n @curr_font = Gauges::FontRef.get(face, size)\n end",
"def fonts_list\n initiate_library\n FONTS_LIBRARY.keys\n end",
"def load_single_font(name)\n \n # Determine path to font file.\n font_file_name = name.gsub(/\\s+/, \"\")\n path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}.ttf\")\n return unless File.file?(path)\n\n # Determine variants.\n italics_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-Italic.ttf\")\n bold_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-Bold.ttf\")\n bold_italics_path = Rails.root.join('lib', 'assets', 'fonts', \"#{font_file_name}-BoldItalic.ttf\")\n\n # Build hash of variants.\n font_hash = { normal: path }\n font_hash[:italic] = italics_path if File.file?(italics_path)\n font_hash[:bold] = bold_path if File.file?(bold_path)\n font_hash[:bold_italic] = bold_italics_path if File.file?(bold_italics_path)\n\n # Add font.\n self.font_families.update(name => font_hash)\n\n end"
] | [
"0.7217223",
"0.7196069",
"0.6780197",
"0.6752662",
"0.65118295",
"0.6366855",
"0.629789",
"0.6283244",
"0.62583375",
"0.6256828",
"0.61211723",
"0.6081258",
"0.6046304",
"0.6038587",
"0.6025069",
"0.6016777",
"0.60059357",
"0.599288",
"0.5972548",
"0.59718275",
"0.59718275",
"0.59691465",
"0.59589046",
"0.59579986",
"0.5948407",
"0.594835",
"0.59404814",
"0.5931156",
"0.59265906",
"0.59123605",
"0.588286",
"0.58808535",
"0.5857849",
"0.5855892",
"0.58494437",
"0.58300304",
"0.582414",
"0.582414",
"0.5819043",
"0.5802704",
"0.57939196",
"0.5778861",
"0.57611805",
"0.57594675",
"0.5759421",
"0.5755115",
"0.5745318",
"0.5735825",
"0.5732732",
"0.57305795",
"0.5710619",
"0.5672363",
"0.56647164",
"0.56614786",
"0.5660462",
"0.5635211",
"0.5606989",
"0.5600401",
"0.5597524",
"0.5597517",
"0.55865186",
"0.5572978",
"0.55680656",
"0.55673736",
"0.55594397",
"0.5558692",
"0.5557064",
"0.55548555",
"0.5551465",
"0.5550559",
"0.5549144",
"0.55386645",
"0.5530263",
"0.5509591",
"0.55077326",
"0.55017143",
"0.54951495",
"0.54887223",
"0.54887223",
"0.5487287",
"0.5441693",
"0.5436068",
"0.54235494",
"0.54088634",
"0.54033464",
"0.5393829",
"0.53917456",
"0.5388089",
"0.5387217",
"0.53849375",
"0.5372709",
"0.5363866",
"0.53517526",
"0.5344765",
"0.5344593",
"0.5329816",
"0.53195584",
"0.5312946",
"0.53048193"
] | 0.6951909 | 3 |
Performs correct modification based on what type of change_type is specified | def font_switch(change_type, arg)
case change_type
when Worksheet::NAME then change_font_name(arg)
when Worksheet::SIZE then change_font_size(arg)
when Worksheet::COLOR then change_font_color(arg)
when Worksheet::ITALICS then change_font_italics(arg)
when Worksheet::BOLD then change_font_bold(arg)
when Worksheet::UNDERLINE then change_font_underline(arg)
when Worksheet::STRIKETHROUGH then change_font_strikethrough(arg)
else raise 'Invalid change_type'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_type=(value)\n @change_type = value\n end",
"def change_type\n return @change_type\n end",
"def change?\n type == 'change'\n end",
"def changeType(newType)\n\t\t\t#remove accessors for old type\n\t\t\tremoveAccessors()\n\t\t\t@type = newType\n\n\t\t\t#produce accessors for the newly set type\n\t\t\tproduceAccessors()\n\t\tend",
"def change_type_set?\n self.change_type\n end",
"def change_type(prev_type, new_type, key)\n require_writeable_layers!\n file = get(prev_type, key)\n store_immediately!(new_type, file)\n layers.immediate.writeable.each do |layer|\n layer.delete(prev_type, key)\n end\n if layers.delayed.writeable.any?\n Dis::Jobs::ChangeType.perform_later(prev_type, new_type, key)\n end\n key\n end",
"def record_relationship_change(field_name, change_type, change_id)\n\t\t\t\t\tself.changed_attributes_aado ||= []\n\t\t\t\t\tif change_type == :add then\n\t\t\t\t\t\tlast_change = self.changed_attributes_aado.last\n\t\t\t\t\t\t# if the last change was for this same field\n\t\t\t\t\t\tif last_change && last_change.name == field_name then\n\t\t\t\t\t\t\t# we combine the removal and add into one replace change\n\t\t\t\t\t\t\tself.changed_attributes_aado.delete_at(self.changed_attributes_aado.size-1)\n\t\t\t\t\t\t\tself.changed_attributes_aado << Change.new(field_name, last_change.old_value, change_id)\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\t# its just an add\n\t\t\t\t\t\t\tself.changed_attributes_aado << Change.new(field_name, nil, change_id)\n\t\t\t\t\t\tend\n\t\t\t\t\telsif change_type == :remove then\n\t\t\t\t\t\tself.changed_attributes_aado << Change.new(field_name, change_id, nil)\n\t\t\t\t\tend\n\t\t\t \t $TRACE.debug 5, \"record_relationship_change: #{self.class}:#{self.id}: changed_attributes = #{changed_attributes_aado.inspect}\"\n\t\t\t\tend",
"def font_switch(c,change_type,arg)\n case change_type\n when Worksheet::NAME\n unless arg.is_a?String\n raise 'Not a String'\n end\n c.change_font_name(arg)\n when Worksheet::SIZE\n unless arg.is_a?(Integer) || arg.is_a?(Float)\n raise 'Not a Number'\n end\n c.change_font_size(arg)\n when Worksheet::COLOR\n Color.validate_color(arg)\n c.change_font_color(arg)\n when Worksheet::ITALICS\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_italics(arg)\n when Worksheet::BOLD\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_bold(arg)\n when Worksheet::UNDERLINE\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_underline(arg)\n when Worksheet::STRIKETHROUGH\n unless arg == !!arg\n raise 'Not a boolean'\n end\n c.change_font_strikethrough(arg)\n else\n raise 'Invalid change_type'\n end\n end",
"def change_type_of_issue(issue_name, new_type_id)\n\tissue = @client.Issue.find(issue_name)\n\tissue.save({\"fields\"=>{\"issuetype\"=>{\"id\"=>new_type_id}}})\nend",
"def process_genomic_change(genotype, record)\n genomic_change = record.raw_fields['genomicchange']\n case genomic_change.strip\n when /NC_0*(?<chr_num>\\d+)\\.\\d+:g\\.(?<genomicchange>.+)/i\n genotype.add_parsed_genomic_change($LAST_MATCH_INFO[:chr_num].to_i,\n $LAST_MATCH_INFO[:genomicchange])\n when nil, ''\n @logger.warn 'Genomic change was empty'\n else\n @logger.warn 'Genomic change did not match expected format,'\\\n \"adding raw: #{genomic_change}\"\n genotype.add_raw_genomic_change(genomic_change)\n end\n end",
"def mutate!(type)\n set_type(type)\n end",
"def type\n if changes.length > 0 && changes[0].defect_type\n \"#{changes[0].defect_type.capitalize} \"\n else\n \"\"\n end\n end",
"def replace_update_type # abstract\n raise 'abstract'\n end",
"def run(changes)\n changes.each do |(id, salesforce_model), accumulator|\n next unless salesforce_model == @mapping.salesforce_model\n\n database_instance = @mapping.database_record_type.find(id)\n salesforce_instance = @mapping.salesforce_record_type.find(id)\n next unless database_instance && salesforce_instance\n\n update(database_instance, accumulator) if @strategy.to_database?\n update(salesforce_instance, accumulator) if @strategy.to_salesforce?\n end\n end",
"def can_change_type?\n new_record? || self.class.valid_migration_paths[self.field_type].present?\n end",
"def get_data(type, change_id = nil)\n case type\n when \"details\"\n when \"messages\"\n when \"basic\"\n else\n end\nend",
"def change_type\n\t\t\trender json: User.update_type_by_id(params[:id], params[:type], params[:is])\n\t\tend",
"def type= new_type\n @gapi.update! type: verify_type(new_type)\n end",
"def change(params); end",
"def changeMetadataType\n begin\n old_type = params[:metadatatypename].to_s.strip.downcase\n new_type = params[:new_metadata_type].to_s.strip.downcase\n puts \"old_t: \" + old_type\n puts \"new_t: \" + new_type\n\n if old_type == \"\"\n render :text => \"Type of metadata not given\", :status => 404\n return\n end\n\n if new_type == \"\"\n render :text => \"Type of new metadata not given\", :status => 404\n return\n end\n\n # Find old metadata type\n @metadatatype = MetadataType.find_by_name(old_type)\n\n # If old metadata type was not found\n if @metadatatype == nil\n render :text => \"Old metadata type not found\", :status => 404\n return\n end\n\n # Check that new type doesn't exist already\n # @@existing_metadata_types listed in the beginning of file\n if MetadataType.find_by_name(new_type) or @@existing_metadata_types.include?(new_type)\n render :text => \"Type of new metadata already exists\", :status => 404\n return\n end\n\n # Change metadata type name\n @metadatatype.update_attribute(:name, new_type)\n render :text => \"Metadata type changed\", :status => 200\n return\n\n rescue => e\n puts \"Error in changing metadatatype: #{e.to_s}\".background(:red)\n render :text => \"Conflict\", :status => 409\n return\n end\n end",
"def set_type\n case content\n when \"empty\" then self.content_type = \"empty\"\n when /ire/ then self.content_type = \"ire\"\n when /hill|tree/ then self.content_type = \"obstacle\"\n when /axe|sword|lance/ then self.content_type = \"enemy\"\n else raise \"Change type\"\n end\n end",
"def update\n\n @specline_update = @specline\n #private method to update txt1 values following change to specline_line linetype\n \n old_linetype = Linetype.find(@specline.linetype_id)\n new_linetype = Linetype.find(params[:specline][:linetype_id]) \n \n if old_linetype.txt1 != new_linetype.txt1 \n txt1_change_linetype(@specline, old_linetype, new_linetype)\n if !@subsequent_prefixes.blank?\n @subsequent_prefixes.compact\n end \n end\n #call to private method that records change to line in Changes table\n \n #only record change if linetype is changed ignoring txt1 \n old_linetype_array = [old_linetype.txt3, old_linetype.txt4, old_linetype.txt5, old_linetype.txt6] \n new_linetype_array = [new_linetype.txt3, new_linetype.txt4, new_linetype.txt5, new_linetype.txt6] \n if new_linetype_array != old_linetype_array\n record_change \n end\n #if new linetype is for product data set identity and perform value pairs to 'not specified'\n if [10,11].include?(params[:specline][:linetype_id])\n @specline_update.update_attributes(:linetype_id => new_linetype.id, :perform_id => 1, :identity_id => 1)\n else\n @specline_update.update_attributes(params[:specline])\n end \n \n end",
"def change_type_required?\n self.change_class_set? && self.change_class.change_types.size > 0\n end",
"def global_registry_relationship_change_action(type)\n [global_registry_relationship(type).primary_foreign_key,\n global_registry_relationship(type).related_foreign_key].each do |key|\n if previous_changes.key?(key)\n # Delete if changed from anything to nil\n return :delete if previous_changes[key].last.nil?\n # Replace if value changed\n return :replace if previous_changes[key].first != previous_changes[key].last &&\n !previous_changes[key].first.nil?\n elsif key.present? && send(key).nil?\n # Ignore if value didn't change and foreign_key is nil\n return :ignore\n end\n end\n # otherwise Create/Update\n :push\n end",
"def update\n prms = @boat_type.is_modification? ? modification_params : boat_type_params\n respond_to do |format|\n if @boat_type.update(prms)\n format.html { redirect_to edit_boat_type_path(@boat_type), notice: 'Тип лодки успешно обновлён' }\n format.json { render json: @boat_type.hash_view('control'), status: :ok}\n else\n format.html { render :edit }\n format.json { render json: @boat_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @change_type = args[:change_type] if args.key?(:change_type)\n @index = args[:index] if args.key?(:index)\n end",
"def update_type\n\t\tfname= \"#{self.class.name}.#{__method__}\"\n\t\tLOG.debug(fname){\"params=#{params.inspect}\"}\n\t\t@object_plm = Customer.find(params[:id])\n\t\tctrl_update_type @object_plm, params[:object_type]\n\tend",
"def can_change( record, type = '*' )\n name, type = get_name_and_type_from_param( record, type )\n self.permissions['allowed'] << [ name, type ]\n end",
"def modify( inputs, type )\n if subfilter\n inputs = subfilter.modify( inputs, type )\n end\n result = self.transforms( inputs, type )\n return result \n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def change\n # just use this for now\nend",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def update!(**args)\n @type = args[:type] if args.key?(:type)\n end",
"def can_modify\n\t\tself.changed_attributes.each do |attr|\n\n\t\t\tif attr.to_s == \"reports\"\n\t\t\t\tself.reports.each do |r|\n\t\t\t\t\tunless r.changed_attributes.blank?\n\t\t\t\t\t\tif r.owner_ids.include? self.created_by_user_id\n\t\t\t\t\t\telsif r.owner_ids.include? self.created_by_user.organization.id.to_s\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tself.errors.add(:reports,\"You cannot edit #{attr.name.to_s}\")\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\n\t\t\telsif attr.to_s == \"recipients\"\n\t\t\t\trecipients_changed\n\t\t\telsif attr.to_s == \"payments\"\n\t\t\t\told_payment_not_deleted\n\t\t\telse\n\t\t\t\t## only in case of \n\t\t\t\tif self.owner_ids.include? self.created_by_user.id.to_s\n\t\t\t\telsif self.owner_ids.include? self.created_by_user.organization.id.to_s\n\t\t\t\telse\n\t\t\t\t\tself.errors.add(:owner_ids,\"You cannot edit the field: #{attr.to_s}\")\n\t\t\t\tend\n\t\t\tend\n\n\t\tend\n\tend",
"def update\n respond_to do |format|\n if @affected_type.update(affected_type_params)\n format.html { redirect_to @affected_type, notice: 'Affected type was successfully updated.' }\n format.json { render :show, status: :ok, location: @affected_type }\n else\n format.html { render :edit }\n format.json { render json: @affected_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update(type, *args)\n change_class = Duxml::const_get \"#{type.to_s}Class\".to_sym\n change_comp = change_class.new *args\n @nodes.unshift change_comp\n changed\n notify_observers(change_comp) unless change_comp.respond_to?(:error?)\n raise(Exception, change_comp.description) if strict? && type == :QualifyError\n end",
"def time_type=(new_time_type)\n write_attribute(:time_type, new_time_type)\n swap_start_finish\n end",
"def update\n respond_to do |format|\n if @comment_status_type.update(comment_status_type_params)\n if @comment_status_type.previous_changes.any?\n save_change_log(current_user,{object_type: 'comment status type', action_type: 'edit', description: \"edited comment status type ID ##{@comment_status_type.id} to '#{@comment_status_type.status_text}', '#{@comment_status_type.color_name}'\"})\n end\n format.html { redirect_to comment_status_types_path, notice: 'Comment status type was successfully updated.' }\n format.json { render :show, status: :ok, location: @comment_status_type }\n else\n set_select_options\n format.html { render :edit }\n format.json { render json: @comment_status_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def can_change?( record, type = '*' )\n name, type = get_name_and_type_from_param( record, type )\n\n # NS records?\n return false if type == 'NS' || type == 'SOA'\n\n # Type protected?\n return false if self.permissions['protected_types'].include?( type )\n\n # RR protected?\n return false if self.permissions['protected'].detect do |r|\n r[0] == name && (r[1] == type || r[1] == '*' || type == '*')\n end\n\n # Allowed?\n return true if self.permissions['allowed'].detect do |r|\n r[0] == name && (r[1] == type || r[1] == '*' || type == '*')\n end\n\n # Default policy\n return self.permissions['policy'] == 'allow'\n end",
"def change\n attributes.fetch(:change)\n end",
"def tell_change(change)\n puts \"You will need to give the customer\"\n if change[:dollars] !=0\n puts \"#{change[:dollars]} dollars\"\n else\n puts \"\"\n end\n if change[:quarters] != 0\n puts \"#{change[:quarters]} quarters\"\n else\n puts \"\"\n end\n if change[:dimes] != 0\n puts \"#{change[:dimes]} dimes\"\n else\n puts \"\"\n end\n if change[:nickels] != 0\n puts \"#{change[:nickels]} nickels\"\n else\n puts \"\"\n end\n if change[:pennies] != 0\n puts \"#{change[:pennies]} pennies\"\n else\n puts \"\"\n end\n end",
"def should_skip_based_on?(old_type)\n if old_type != CURRENT_TYPE_TO_REPLACE\n return true\n end\n\n false\nend",
"def test_truth\n assert_kind_of Change, @change\n end",
"def change\n change_column(:table_name, :column_name, :new_type)\nend",
"def change_branch(branch_type, event_type, old_branch, new_branch)\n body =\n case event_type.to_s\n when 'delete'\n \"deleted the `#{old_branch}` branch. This merge request now targets the `#{new_branch}` branch\"\n when 'update'\n \"changed #{branch_type} branch from `#{old_branch}` to `#{new_branch}`\"\n else\n raise ArgumentError, \"invalid value for event_type: #{event_type}\"\n end\n\n create_note(NoteSummary.new(noteable, project, author, body, action: 'branch'))\n end",
"def update_type\n\t\tfname= \"#{self.class.name}.#{__method__}\"\n\t\t#LOG.debug(fname){\"params=#{params.inspect}\"}\n\t\t@relation = Relation.find(params[:id])\n\t\t@types = Typesobject.get_types(\"relation\")\n\t\t@status = Statusobject.get_status(\"relation\")\n\t\tctrl_update_type @relation, params[:object_type]\n\tend",
"def set_content_type old_type, old_resource_type, new_type, new_resource_type\n xml=self.ng_xml\n xml.search('/contentMetadata[@type=\\''+old_type+'\\']').each do |node|\n node['type']=new_type\n xml.search('//resource[@type=\\''+old_resource_type+'\\']').each do |resource|\n resource['type']=new_resource_type\n end\n end\n self.content=xml.to_s\n end",
"def after_save(change)\n timing \"Expiring change cache\"\n case change.change_type\n when Change::OWNERSHIP\n expire_item_cache(change)\n when Change::PERSON_LOCATION\n expire_map_cache(change)\n when Change::PERSON_MAIN_LOCATION\n expire_map_cache(change)\n end\n end",
"def type=(type); end",
"def create_change(change)\n Change.create(:changeset => self,\n :action => change[:action],\n :path => change[:path],\n :from_path => change[:from_path],\n :from_revision => change[:from_revision])\n end",
"def set_type\n end",
"def source_change(change, filename)\n end",
"def find_and_trigger_event(event_type, args = nil)\r\n \r\n case event_type\r\n when :before_change\r\n \r\n if respond_to?(:before_change)\r\n \r\n results = send(:before_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change\r\n \r\n if respond_to?(:after_change)\r\n \r\n results = send(:after_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_change_field\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:before_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:before_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change_field\r\n #looks in own class for :after_change_field for the field passed, requires the parameter of a Field object to be passed\r\n \r\n #SAPPHIRE UPDATE\r\n #SEARCH FOR ACTIVE MAP DEFINITION THAT INCLUDES THE CURRENT TABLE AND FIELD.\r\n #IF ANY ARE FOUND QUEUE THE PROCESS\r\n if args.table.field_order.include?(\"student_id\")\r\n \r\n if map_id = $tables.attach(\"SAPPHIRE_INTERFACE_MAP\").field_value(\r\n \"primary_id\",\r\n \"WHERE athena_table = '#{table_name }'\r\n AND athena_field = '#{args.field_name }'\r\n AND trigger_event = 'after_change_field'\"\r\n )\r\n \r\n sid = $tables.attach(args.table.table_name).field_value(\"student_id\", \"WHERE primary_id = '#{args.primary_id}'\")\r\n student = $students.get(sid)\r\n \r\n if student && student.active.is_true?\r\n \r\n queue_record = $tables.attach(\"SAPPHIRE_INTERFACE_QUEUE\").new_row\r\n queue_record.fields[\"map_id\" ].value = map_id\r\n queue_record.fields[\"athena_pid\" ].value = args.primary_id\r\n queue_record.save\r\n \r\n end\r\n \r\n end\r\n \r\n end\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:after_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:after_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_load #any table can have this event for self table\r\n \r\n continue_with_load = true\r\n \r\n this_trigger_event = \"before_load_#{table_name.downcase}\"\r\n \r\n tables_with_before_load_events = args ? args : event_array(this_trigger_event)\r\n \r\n tables_with_before_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n \r\n begin\r\n continue_with_load = this_table.send(this_trigger_event)\r\n \r\n rescue=> e\r\n #raise e #THIS SHOULD HAVE BEEN A SYSTEM NOTIFICATION - ADDING NOW BUT LEACING THIS NOTE HERE TO HELP IDENTIFY ANY ISSUES THAT MAY COME TO LIGHT WHICH WERE CONCEALED BY THIS BEFORE...\r\n $base.system_notification(\r\n subject = \"BEFORE LOAD FAILED - #{file}\",\r\n content = \"Don't just stand there and shout it; do something about it... 
Here's the error:\r\n #{e.message}\r\n <br>\r\n <br>\r\n #{e.backtrace}\"\r\n )\r\n \r\n end\r\n \r\n } if tables_with_before_load_events\r\n \r\n return continue_with_load\r\n \r\n when :after_load #any table can have this event for self table\r\n \r\n this_trigger_event = \"after_load_#{table_name.downcase}\"\r\n \r\n tables_with_after_load_events = args ? args.dup : event_array(this_trigger_event)\r\n \r\n db_config_record(\r\n field_name = \"phase_total\",\r\n new_value = tables_with_after_load_events.join(\",\")\r\n )\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = nil\r\n )\r\n \r\n if !args || args.include?(\"move_source_to_dest\")\r\n tables_with_after_load_events.delete(\"move_source_to_dest\")\r\n move_source_to_dest\r\n end\r\n \r\n tables_with_after_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Started #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n begin\r\n this_table.send(this_trigger_event)\r\n db_config_record = $tables.attach(\"Db_Config\").by_table_name(table_name)\r\n phase_completed = db_config_record.fields[\"phase_completed\"].value\r\n phase_completed = (phase_completed ? \"#{phase_completed},#{file}\" : file)\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = phase_completed\r\n )\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Completed #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n rescue=> e\r\n after_load_failed(message = \"#{file} - #{e.message} <br><br> #{e.backtrace}\", e)\r\n raise e\r\n end\r\n \r\n } if tables_with_after_load_events\r\n \r\n when :after_insert\r\n send(:after_insert, args) if respond_to?(:after_insert)\r\n \r\n when :after_save\r\n send(:after_save, args) if respond_to?(:after_save)\r\n \r\n when :before_insert\r\n #Looks in own class for before_insert event, requires the parameter of a Row object to be passed\r\n if respond_to?(:before_insert)\r\n send(:before_insert, args)\r\n else\r\n return true\r\n end\r\n \r\n end\r\n \r\n return true\r\n \r\n end",
"def apply_rules(record)\n rules.each do |rule|\n if record.note =~ Regexp.new(rule.trigger)\n rule.effects.each do |effect|\n if effect.change_kind?\n record.kind = effect.value if effect.value.to_i.in? Record.kinds\n elsif effect.change_source_account?\n record.source_account = Account.find(effect.value)\n elsif effect.change_target_account?\n record.target_account = Account.find(effect.value)\n record.target_currency = record.source_currency\n record.kind = Record::KIND_TRANSFER\n record.target_amount = record.source_amount\n elsif effect.add_tag?\n record.tags << Tag.find(effect.value)\n elsif effect.change_party?\n record.party = Party.find(effect.value)\n elsif effect.change_group?\n record.group = Group.find(effect.value)\n elsif effect.change_source_sign?\n record.source_amount *= -1\n elsif effect.change_target_sign?\n record.target_amount *= -1\n elsif effect.remove_record?\n record.destroy\n end\n end\n end\n end\n end",
"def update\n respond_to do |format|\n if @adjustment_type.update(adjustment_type_params)\n format.html { redirect_to @adjustment_type, notice: 'Adjustment type was successfully updated.' }\n format.json { render :show, status: :ok, location: @adjustment_type }\n else\n format.html { render :edit }\n format.json { render json: @adjustment_type.errors, status: :unprocessable_entity }\n end\n end\n end",
"def handle_entry_type(entry_type)\n # settings is required to trigger exceptions, which set the definition\n HQMF::DataCriteria.get_settings_for_definition(entry_type, @status)\n @definition = entry_type\n rescue\n # if no exact match then try a string match just using entry definition value\n case entry_type\n when 'Medication', 'Medications'\n @definition = 'medication'\n @status = 'active' unless @status\n when 'RX'\n @definition = 'medication'\n @status = 'dispensed' unless @status\n when nil\n definition_for_nil_entry\n else\n @definition = extract_definition_from_entry_type(entry_type)\n end\n end",
"def handle_entry_type(entry_type)\n # settings is required to trigger exceptions, which set the definition\n HQMF::DataCriteria.get_settings_for_definition(entry_type, @status)\n @definition = entry_type\n rescue\n # if no exact match then try a string match just using entry definition value\n case entry_type\n when 'Medication', 'Medications'\n @definition = 'medication'\n @status = 'active' unless @status\n when 'RX'\n @definition = 'medication'\n @status = 'dispensed' unless @status\n when nil\n definition_for_nil_entry\n else\n @definition = extract_definition_from_entry_type(entry_type)\n end\n end",
"def update_from_old_member\n if self.old_member\n old_member = self.old_member\n\n # If this member is a committee member\n if old_member.tier_id == 3\n\n name = old_member.position.chomp(\"Committee Member\").strip\n committee_type = CommitteeType.committee\n cm_type = CommitteeMemberType.cm\n\n # If this member is a committee chair\n elsif old_member.tier_id == 4\n\n name = old_member.position.chomp(\"Chair\").strip\n committee_type = CommitteeType.committee\n cm_type = CommitteeMemberType.chair\n\n # If this member is an executive\n elsif old_member.tier_id == 5\n\n name = \"Executive\"\n committee_type = CommitteeType.admin\n cm_type = CommitteeMemberType.exec(old_member.position)\n\n # Exit with nil if the correct cm_type was not found\n return nil if cm_type.nil?\n\n # If this member is a general member\n elsif old_member.tier_id == 2\n\n name = \"General Members\"\n committee_type = CommitteeType.general\n cm_type = CommitteeMemberType.gm\n\n end\n\n self.add_to_committee(name, committee_type, cm_type)\n\n # Remove from any general committees unless the member belongs there\n self.remove_from_general unless old_member.tier_id == 2\n\n return self.save\n end\n end",
"def change(column_name, type, **options)\n raise_on_if_exist_options(options)\n @base.change_column(name, column_name, type, **options)\n end",
"def type=(new_type)\n self[:type_flags] = (flags | TYPES[new_type])\n end",
"def set_affected_type\n @affected_type = AffectedType.find(params[:id])\n end",
"def toggle_content_type\n @blog = Blog.find(params[:blog_id])\n @blog.change_content_type\n respond_to do |format|\n format.html {render :edit}\n end\n end"
] | [
"0.7415962",
"0.70975107",
"0.65295625",
"0.63964105",
"0.6308899",
"0.62117475",
"0.6207137",
"0.60622466",
"0.6033784",
"0.59838253",
"0.5969564",
"0.59412515",
"0.57860947",
"0.57648253",
"0.5755102",
"0.57204807",
"0.56712013",
"0.5636245",
"0.56282115",
"0.55879545",
"0.55736655",
"0.55703455",
"0.55702186",
"0.5549111",
"0.5516173",
"0.55053234",
"0.5489521",
"0.5489449",
"0.5411645",
"0.53967893",
"0.5396211",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53950053",
"0.53698194",
"0.53689384",
"0.53627867",
"0.5356191",
"0.5349465",
"0.5342814",
"0.53427076",
"0.53375477",
"0.53372",
"0.5328907",
"0.5310305",
"0.5306763",
"0.5305665",
"0.5294322",
"0.5285219",
"0.5277086",
"0.5274542",
"0.52636176",
"0.526241",
"0.52577645",
"0.5248385",
"0.5234953",
"0.52187365",
"0.52187365",
"0.52095276",
"0.51938134",
"0.51933014",
"0.51916575",
"0.5189543"
] | 0.62650555 | 6 |
insert a message into table | def single_insert(table_name, hash)
status = true
begin
columns = []
values = []
hash.keys.each do |item|
columns.push(item)
      # escape each value so embedded quotes cannot break the generated statement
      values.push("'#{@mysql_client.escape(hash[item].to_s)}'")
end
columns = columns.join(",")
values = values.join(",")
@mysql_client.query("INSERT INTO #{table_name} (#{columns}) VALUES (#{values})")
rescue
status = false
end
return status
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_message_table\n execute_sql_statement(\"CREATE TABLE messages (\\n\" \\\n \" id CHAR PRIMARY KEY, \\n\" \\\n \" sender CHAR NOT NULL, -- Hostname of sender \\n\" \\\n \" action CHAR NOT NULL, -- The action to perform \\n\" \\\n \" payload CHAR, -- Optional payload \\n\" \\\n \" ack INTEGER DEFAULT 0, -- ack sent \\n\" \\\n \" date_time CHAR NOT NULL, -- Time sent \\n\" \\\n \" direction CHAR DEFAULT 'in', -- In or out bound msg \\n\" \\\n \" processed INTEGER DEFAULT 0 \\n\" \\\n \");\".strip)\n end",
"def record_message(room_hash, is_admin, msg, admin_name, speaker)\n create_chatroom(room_hash, admin_name, 'client_name')\n \n begin\n the_time = Time.now-Time.zone_offset('CST')\n insert_new_msg = @db.prepare \"INSERT INTO chat_messages (room_hash, chat_user_id, msg, speaker, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)\"\n #binding.pry\n insert_new_msg.execute room_hash, is_admin, msg, speaker, the_time, the_time\n \n insert_new_msg.close\n ensure\n #@db.close\n end\n end",
"def push( contents )\n @write_lock.synchronize do\n query( \"INSERT INTO messages (content) VALUES (?)\", contents )\n db.last_insert_rowid\n end\n end",
"def set_message(str,flag)\n db = SQLite3::Database.new(@fn)\n if (! flag)\n # db.busy_timeout=1000 # milliseconds?\n db.busy_timeout(1000) # milliseconds?\n db.transaction(:immediate)\n stmt = db.prepare(\"delete from msg where user_id=?\")\n stmt.execute(@user_id);\n stmt.close\n db.commit\n end\n\n # db.busy_timeout=1000 # milliseconds?\n db.busy_timeout(1000) # milliseconds?\n db.transaction(:immediate)\n stmt = db.prepare(\"insert into msg (user_id,msg_text) values (?,?)\")\n stmt.execute(@user_id, str);\n stmt.close\n db.commit\n db.close();\n end",
"def log_message( level, message )\n t = Time.now\n log_time = \"#{t.strftime('%Y/%m/%d %H:%M:%S')}\"\n\n if @db != nil\n begin\n stm = @db.prepare( 'INSERT INTO tblMessages (mStatus, mTime, mText) VALUES (?, ?, ?)' )\n stm.bind_params(level_text(level), log_time, message)\n rs = stm.execute\n mId = @db.last_insert_row_id.to_s\n stm.close\n rescue ::SQLite3::Exception => e\n Maadi::post_message(:Warn, \"Repository (#{@type}:#{@instance_name}) encountered an error (#{e.message}).\")\n end\n end\n end",
"def store message\n tsync do\n @takable << StoredMessage.new(@pid, next_id, message)\n end\n end",
"def insert_into(table, data)\n\t\tkeys = \"(#{data.keys.join(', ')})\"\n\t\tvalues = \"(#{data.values.map{ |value| \"'#{value}'\" }.join(', ')})\"\n\t\texecute_with_retry \"INSERT INTO #{table} #{keys} VALUES #{values}; \"\n\tend",
"def inject_outbound_message(builder)\n sql = \"insert into messages \\n\" \\\n \"(id, sender, action, payload, ack, direction, date_time) \\n\" \\\n \"values\\n\" \\\n \"('#{builder.id}', '#{builder.sender}', '#{builder.action}', \\n\" \\\n \" '#{builder.payload}', '0', 'out', '#{builder.date_time}');\"\n\n execute_sql_statement(sql)\n end",
"def add_quote(msg)\n\n @db.execute(\"INSERT INTO quotes VALUES(NULL,?)\",msg)\n\n puts \"I added #{msg} to quotes database\"\n return @db.get_first_value(\"SELECT id FROM quotes ORDER BY id DESC LIMIT 1;\")\n end",
"def add_to (table_name,title,code)\r\n\t\"INSERT INTO \" + table_name + \r\n\t\"(title,code) VALUES \r\n\t('\" + title + \"','\" + code + \"')\"\r\nend",
"def insert(phrase)\n if @db.nil?\n return\n end\n\n phrase.strip!\n phrase = [[\"\"]*PARTS, phrase.split(/\\s+/), \"\"].flatten\n\n phrase.each_cons(PARTS+1) do |chunk|\n begin\n @insert.execute chunk.take(PARTS).join(\" \"), chunk[-1]\n rescue\n puts \"failed to insert into database\"\n end\n end\n end",
"def send_message(msg)\n\t\t\tdb_update({}, {'$push' => { 'messages' => msg.id.to_db } })\n\t\t\tmessages.add(msg)\n\t\tend",
"def insert(table,doc)\n connection.insert(path(table),doc)\n end",
"def insert_record(table, values)\n execute table_insert_query(table, values)\n end",
"def processmsg(i,errormsg)\n puts \"error for oid: #{i[\"oid\"]} errormsg: #{errormsg}\"\n\tehid = @@client.execute(%Q/insert into dbo.hydra_publish_error (hpid,date,oid,error) values (#{i[\"hpid\"]},GETDATE(),#{i[\"oid\"]},\"#{errormsg}\")/)\n\tehid.insert\n end",
"def insert()\n\n end",
"def insert()\n\n # Kräver att det finns ett \"set_table(\"Table\")\" i klassen\n @insertable_vars_full = self.instance_variables # Ta med namnen user.username osv\n @insertable_vars_full.shift(1) # Kinda frisky\n @insertable_vars = []\n @insertable_values = []\n @insertable_vars_full.each do |var|\n @insertable_vars << var[1..-1]\n @insertable_values << self.instance_variable_get(var)\n end\n\n\n @insertable_vars_str = @insertable_vars.join(\", \")\n\n @question_marks = \"\"\n @insertable_vars.each do |key|\n @question_marks.concat(\"?,\")\n end\n @question_marks = @question_marks[0..-2]\n\n DB.execute(\"INSERT INTO #{@table} (#{@insertable_vars_str})\n VALUES (#{@question_marks})\", @insertable_values)\n\n end",
"def save_form_data_to_database\n db = get_db\n db.execute 'INSERT INTO Messages (username, phone, email, option, comment)\n VALUES (?, ?, ?, ?, ?)', [@username, @phone, @email, @option, @comment]\n db.close\nend",
"def insert(object, table)\n sql = object.to_sql(table)\n execute(sql)\n end",
"def create\n if !params[:callback].nil? && !params[:id].nil?\n @message_table = MessageTable.where(\"id = ?\", params[:id]).first\n if @message_table.nil?\n render :text => \"#{params[:callback]}({\\\"status\\\" : \\\"error\\\", \\\"msg\\\" : \\\"信息表不存在!\\\"})\"\n elsif !@message_table.effective?\n render :text => \"#{params[:callback]}({\\\"status\\\" : \\\"error\\\", \\\"msg\\\" : \\\"信息表已过期!\\\"})\"\n else\n _message_object = {}\n @message_table.message_columns.each do |message_column|\n if message_column.type_id == 3\n if params[:message_table][message_column.column_name.to_sym].nil?\n _message_object[message_column.column_name] = []\n else\n if params[:message_table][message_column.column_name.to_sym].kind_of?(Array)\n _message_object[message_column.column_name] = params[:message_table][message_column.column_name.to_sym]\n else\n _message_object[message_column.column_name] = [].push(params[:message_table][message_column.column_name.to_sym])\n end\n end\n else\n _message_object[message_column.column_name] = params[:message_table][message_column.column_name.to_sym] || ''\n end\n end\n @message_text = MessageText.new\n @message_text.message_table_id = @message_table.id\n @message_text.text = _message_object\n @message_text.ip = request.remote_ip\n @message_text.created_at = DateTime.now\n @message_text.updated_at = DateTime.now\n if !current_user.nil?\n @message_text.user_id = current_user.uid\n @message_text.user_name = current_user.name\n end\n @message_text.save\n render :text => \"#{params[:callback]}({\\\"status\\\" : \\\"success\\\", \\\"msg\\\" : \\\"信息提交成功!\\\"})\"\n end\n \n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def insert_channel(con,channel)\n sql=\"INSERT into irc_channel (channel) VALUES ('#{channel}');\"\n res=con.query(sql)\nend",
"def add_subject(database, name)\r\n\tdatabase.execute(\"INSERT INTO subjects (name) VALUES (?)\", [name])\r\nend",
"def insert\n DATABASE.execute(\"INSERT INTO boards (title,description) VALUES ('#{@title}', '#{@description}')\")\n @id = DATABASE.last_insert_row_id\n end",
"def insert(template, *data) # :nodoc:\n chk_conn\n conn = @hibernate_session.connection\n stmt = conn.prepare_statement(template)\n data.each do |d|\n d.each_with_index do |item, index|\n if item.kind_of?(Array)\n set_prepared_statement(stmt, item[0], index+1, item[1])\n else\n set_prepared_statement(stmt, item, index+1, nil)\n end\n end\n stmt.execute_update\n end\n conn.commit\n ensure\n stmt.close rescue nil\n end",
"def insert(table, id, attributes) # abstract\n end",
"def send_message(message)\n socket.enqueue_packet(message)\n end",
"def create_message(data); end",
"def create_message(data); end",
"def add_symptom(db, symptom, date, time_of_day, severity)\n $db.execute(\"INSERT INTO symptoms (symptom, date, time_of_day, severity) VALUES (?, ?, ?, ?)\", [symptom, date, time_of_day, severity])\nend",
"def insert_into_article(title, content, price, user_id, adress, phone_number, date_created, tag_id) \n return get_db().execute(\"INSERT INTO article (title, content, price, user_id, adress, phone_number, date_created, tag_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?)\", title, content, price, user_id, adress, phone_number, date_created, tag_id)\n end",
"def enqueue_message(message)\n socket.enqueue_packet(message)\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def insertConUser(idUser,idChannel)\n \n begin\n query = \"INSERT INTO `#{DB_NAME}`.`#{USER_LIST_IN_CHAN_TABBLE}` (`user_id_user`, `channel_id_channel`) \n VALUES (?, ?)\"\n \n self.connect unless self.connected? # => connect to the DB server if not connected\n \n sth = @dbh.prepare(query)\n\n sth.execute(idUser,idChannel)\n sth.finish\n rescue DBI::DatabaseError => e\n puts \"An error occurred\"\n puts \"Error code: #{e.err}\"\n puts \"Error message: #{e.errstr}\"\n @dbh.rollback\n rescue Exception => e \n puts \"error!!! -> : #{e.to_s}\"\n \n ensure\n # disconnect from server\n @dbh.disconnect if @connected\n @connected=false\n end\n end",
"def insert(data) \n set data, 1\n end",
"def insert(data) \n set data, 1\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def add_message(name, message)\n\t\tend",
"def insert_start (db, event)\n\tdb.execute(\"INSERT INTO start (event) VALUES (?)\", [event])\nend",
"def push(data)\n return nil if data.nil?\n obj = MultiJson.decode(data)\n\n table_ = [keyspace, table].compact.join('.')\n queue_name = @keyspace\n created_at = @timeuuid_generator.now\n\n begin\n @semaphore.synchronize do\n\n if obj.has_key?('payload') && !obj['payload'].empty?\n payload = MultiJson.encode(obj['payload'])\n else\n payload = nil\n end\n\n column_names = %w[ queue_name created_at payload ]\n values_placeholders = column_names.map{|_| '?'}.join(',')\n statement = \"INSERT INTO #{table_} ( #{column_names.join(',')} ) VALUES (#{values_placeholders});\"\n\n session.execute(\n session.prepare(statement),\n arguments: [\n queue_name,\n created_at,\n payload\n ])\n end\n rescue Encoding::UndefinedConversionError\n puts $!.error_char.dump\n puts $!.error_char.encoding\n end\n\n @logger.debug { \"Writing this entry [#{[queue_name, created_at].to_s}]\" }\n [queue_name, created_at].to_s\n end",
"def create\n create_message(params)\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def insert_record(name, type, ttl, content)\n records_table.insert(\n :domain_id => domain.id,\n :name => name,\n :type => type, \n :ttl => ttl, \n :content => content,\n :change_date => Time.now.to_i\n )\n end",
"def save\n # 1. What am I saving?\n # 2. input => username, message\n\n # if i saved\n # update OI don't want to do thjis!!!\n # else\n\n sql = <<-SQL\n INSERT INTO tweets (username, message)\n VALUES (?, ?);\n SQL\n\n # SQL Injection, parameterize\n\n DB[:conn].execute(sql, self.username, self.message)\n @id = DB[:conn].execute(\"SELECT * FROM tweets\").last[\"id\"]\n end",
"def storeMessage(msg)\n msg = msg.gsub(\"\\'\", \"\\\\\\\\'\")\n msg.strip! # remove all the whitespaces trailing from irc\n dt = Time.now\n\n # This part splits the messages to store them into the database\n line = msg\n line.chomp!\n tmp = Array.new\n\n # What to ignore\n\n # The if statement makes sure that what is being stored\n # is none from the welcome messages from irc.freenode.net \n # This is somewhat a hack because I'm negating all the raw\n # numerals from IRC, and taking 0 as a true case (IRC raw\n # numerals do not include 0, but converting a string \"example\"\n # to an integer will make it 0). \n if line.split[1] == \"PRIVMSG\" and (line.split[1].to_i < 1 or line.split[1].to_i > 606) then\n \n # This part splits up all the needed information to put inside\n # the database. It might need some cleaning up, because I wrote this\n # in a rush \n # TODO clean up the splitting code.\n \n puts \"INSIDE IF! line = \" + line \n\n tmp = line.split('!~')\n username = tmp[0]\n username = username.split('') # get rid of the ':'\n username.shift\n username = username.join \n\n tmp = tmp[1].split\n \n clienthost = tmp[0]\n action = tmp[1]\n channel = tmp[2]\n tmp.shift # get rid of the other two parts\n tmp.shift \n tmp.shift\n \n message = tmp.join(' ').gsub(':', '')\n \n # Form the query as string for better readability\n que = \"INSERT INTO rubicante_logs(timestamp, nickname, clienthost, action, channel, msg) VALUES ('\"\n que += dt.to_s + \"', '\"\n que += username + \"', '\"\n que += clienthost + \"', '\"\n que += action + \"', '\"\n que += channel + \"', '\" \n que += message + \"');\"\n \n # Perform the query using the MySQL object! \n @mResource.query(que)\n puts \"derp\" \n else \n puts \"Ignoring numeral \" + line.split[1] \n end\n end",
"def persist_message(params)\n message = params[:subscribable].messages.create(content: params[:message])\n params[:message_object] = message\n continue(params)\n end",
"def add_row( new_row, pos=@table.size)\n @table.insert(pos, new_row)\n end",
"def insert_takoble\n\t\tsql = \"insert into TAKO_List values (?, ?)\"\n\t\treturn sql\n\tend",
"def insert_data(test)\n is_singular = test['insert'].length == 1\n entity_or_entities = nil\n if is_singular\n entity_or_entities = 'entity'\n else\n entity_or_entities = 'entities'\n end\n puts \" Inserting #{test['insert'].length} #{entity_or_entities}\"\n\n insert_data = translate_column_names(test['insert'])\n if @verbose\n puts insert_data\n end\n\n @client.insert(insert_data)\n\n # Wait a few seconds so the data can be inserted by SlicingDice\n sleep @sleep_time\n end",
"def insert \n DATABASE.execute(\"INSERT INTO contents (slide_order, title, body) \n VALUES (#{@slide_order}, '#{@title}', '#{@body}')\")\n @id = DATABASE.last_insert_row_id\n end",
"def put_msg(action, options)\n\n msg = prepare_msg_doc(action, options)\n msg['put_at'] = Ruote.now_to_utc_s\n\n msg['_rev'] = 0\n # in case of msg replay\n\n collection(msg).insert(to_mongo(msg))\n end",
"def insert(index, string)\n execute_only(:insert, index, string)\n end",
"def enqueue_message(message); end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def write(str)\n $stderr.puts str if @console\n self.class.connection.collection(@collection_name).insert({:time => Time.now, :msg => str})\n end",
"def create_entry()\n\tdb.execute( \"INSERT INTO test (name, codename) VALUES (?, ?)\", [name, code_name])\n\nend",
"def insertar (id_usu_den,id_tip_doc,num_doc_den,fec_exp_doc_den,fec_nac_den,pri_nom_den,seg_nom_den,pri_ape_den,seg_ape_den,tel_den,dir_den,push_den,ema_den)\n \tputs \"insertar: \"\n \tprint \"INSERT INTO public.denunciante(\n id_usu_den, id_tip_doc, num_doc_den, fec_exp_doc_den, fec_nac_den, \n pri_nom_den, seg_nom_den, pri_ape_den, seg_ape_den, tel_den, \n dir_den, push_den, ema_den)\n VALUES (?, ?, ?, ?, ?, \n ?, ?, ?, ?, ?, \n ?, ?, ?);\"\n\n end",
"def insert()\n\t\t@db.execute(\"insert into ausgaben (jahr, monat, name, betrag, gemeinsam, tags) values(:jahr, :monat, :name, :betrag, :gemeinsam, :tags)\", @options)\n\tend",
"def update_message(data); end",
"def update_message(data); end",
"def message_sent(type, text, to = nil)\n msg = message_db.add(\n type: type,\n to: to,\n from: username,\n text: text,\n time: nil\n )\n signal_change(type, msg)\n end",
"def posted(msg, chat)\n @pgsql.exec('UPDATE telechat SET recent = $1 WHERE id = $2', [msg, chat])\n end",
"def add_lower_body(db, name, complete, user_id)\n\tcomplete = \"incomplete\"\n\tdb.execute(\"INSERT INTO lower_body (name, complete, user_id) VALUES (?,?,?)\", [name, complete, user_id])\n\tputs \"added leg workout\"\nend",
"def db_insert table, fields= {}\n #client = Mysql2::Client.new(:host => \"localhost\", :username => \"root\", :password => \"toor\", :database => \"filesorter\")\n @query= \"INSERT INTO `#{table}` (`#{fields.keys.join('`, `')}`) VALUES ('\"+fields.values.join(\"', '\")+\"');\"\n do_query\n end",
"def ins table, col, val\n pst = @con.prepare 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(?)'\n pst.execute s(val)\n #puts 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(' + s(val) + ')'\n end",
"def insert key, value\n begin\n db.putnr(key, value)\n rescue StandardError => e ; handle_error(\"Insert #{[key, value].inspect}\", e); end\n end",
"def insert(id, hash)\n @conn.execute(*@builder.insert(id, hash))\n end",
"def insert_article(id, body, s)\n if id == nil\n stmt = @sql.prepare \"insert into articles (original_article, article) values(?,?)\"\n stmt.execute(body, s)\n else\n stmt = @sql.prepare \"insert into articles (article_list_id, original_article, article) values(?,?,?)\"\n stmt.execute(id, body, s)\n end\n end",
"def insert(params)\n params[:real_at] ||= AtStructure.new(params[:at])\n\n if ['_space_', '_return_'].include?(params[:content])\n params.merge!(is_balise: true)\n end\n\n params.merge!(operation: 'insert') unless params.key?(:operation)\n # On ajoute si nécessaire le text-item de référence, qui permettra,\n # notamment, de renseigner les messages, de récupérer le file_id si c'est\n # un projet Scrivener, pour l'affecter aux nouveaux text-items et\n # d'enregistrer les messages d'opération.\n params.merge!(titem_ref: params[:real_at].first_titem) unless params.key?(:titem_ref)\n # Sauf si c'est une balise (*), on crée la simulation pour voir si on va vraiment faire\n # cete opération.\n # (*) Car on ne peut pas occasionner de proximités quand c'est une balise.\n unless params[:is_balise]\n new_titems = simulation(params.merge(debug: debug_insert?)) || return\n end\n\n if params[:operation] == 'insert'\n msg = \"Insertion de “#{params[:content]}” à l’index #{params[:real_at].abs(:at)} (avant “#{extrait_titems[params[:real_at].at].content}”)\"\n log(msg, true)\n end\n\n # :is_balise est true quand on donne '_space_' ou '_return_' comme texte\n unless params[:is_balise]\n # Si c'est une pure insertion, il faut ajouter une espace soit avant\n # soit après les nouveaux items. On l'ajoute après si le titem d'après\n # est un mot (.mot?) et on l'ajoute avant si le titem avant est un mot.\n if params[:operation] == 'insert'\n next_titem = extrait_titems[params[:real_at].at]\n prev_titem = extrait_titems[params[:real_at].first - 1]\n if next_titem && next_titem.mot? && new_titems.last.mot?\n # Dans le cas où l'item suivant existe, que c'est un mot, et que\n # le dernier titem à insérer est aussi un mot, il faut ajouter\n # une espace à la fin des nouveaux items.\n new_titems << NonMot.new(SPACE, type: 'space')\n elsif prev_titem && prev_titem.mot? && new_titems.first.mot?\n # Sinon, dans le cas où l'item précédent existe, que c'est un mot\n # et que le premier item à insérer est aussi un mot, il faut ajouter\n # une espace au début des nouveaux items\n new_titems.unshift(NonMot.new(SPACE, type:'space'))\n end\n end\n else\n new_item = case params[:content]\n when '_space_' then NonMot.new(SPACE, type:'space')\n when '_return_' then NonMot.new(RC, type:'paragraphe')\n end\n new_titems = [new_item]\n end\n # log(\"Nouveaux items ajoutés (#{new_titems.count}) : \")\n # log(new_titems.inspect)\n\n # Si c'est un projet Scrivener, il faut ajouter le file_id de l'item\n # de référence aux nouveaux items\n if itexte.projet_scrivener?\n new_titems.each {|titem| titem.file_id = params[:titem_ref].file_id}\n end\n\n # Insertion des nouveaux titems dans l'extrait\n extrait_titems.insert(params[:real_at].at, *new_titems)\n # Insertion des nouveaux titems dans la base de données\n start_abs_index = params[:real_at].abs(:at)\n new_titems.each_with_index { |i, idx| i.index = idx + start_abs_index }\n itexte.db.insert_text_items(new_titems)\n\n # Pour l'annulation (sauf si c'est justement une annulation)\n if params.key?(:cancellor)\n idx = params[:real_at].abs(:at)\n new_titems.each do |titem|\n content = titem.space? ? 
'_space_' : titem.content\n params[:cancellor] << {operation: :remove, index:idx, content:content}\n # Note : le content, ci-dessus, ne servira que pour la vérification\n end\n end\n\n # Si c'est vraiment une opération d'insertion, on l'enregistre\n # en tant qu'opération.\n # Noter qu'il faut le faire avant l'update suivant, sinon tous les\n # index et indices seront recalculés et donc faux.\n if params[:operation] == 'insert'\n itexte.operator.add_text_operation(params)\n end\n\n unless params[:noupdate]\n update\n end\nend",
"def insertar(texto,timestamp,usuario,fotousuario)\n\t\n\t\tlistaobjeto=[texto,timestamp,usuario,fotousuario]\n\t\t@Lista+=[listaobjeto]\n\tend",
"def insert_table(table, identifier, data)\n if existing_data = get_table(table, identifier)\n if existing_data.empty? || !existing_data.has_key?('.members')\n data.each { |key, value| existing_data[key] = value }\n return data\n else\n error_report \"Error in insert_table: Redefinition of #{identifier}\"\n raise ParseError\n end\n end\n\n table['table_data'].push([identifier, data])\n table['quick_look'][identifier] = 1\n return data\nend",
"def insert_table(table, identifier, data)\n if existing_data = get_table(table, identifier)\n if existing_data.empty? || !existing_data.has_key?('.members')\n data.each { |key, value| existing_data[key] = value }\n return data\n else\n error_report \"Error in insert_table: Redefinition of #{identifier}\"\n raise ParseError\n end\n end\n\n table['table_data'].push([identifier, data])\n table['quick_look'][identifier] = 1\n return data\nend",
"def insert\n # Preparing for the query...\n cols = self.class.columns\n col_names = cols.map(&:to_s).join(\", \")\n question_marks = ([\"?\"] * cols.count).join(\", \")\n \n # The actual query\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{ self.class.table_name } (#{ col_names })\n VALUES\n (#{ question_marks })\n SQL\n \n # Add an id number for the record\n self.id = DBConnection.last_insert_row_id\n end",
"def insert\n DATABASE.execute(\"INSERT INTO terms (term, definition, phonetic) VALUES \n ('#{@term}', '#{@definition}', '#{@phonetic}')\")\n @id = DATABASE.last_insert_row_id\n end",
"def send_message(message); end",
"def send_message(message); end",
"def checkifexists\n que = \" CREATE TABLE IF NOT EXISTS rubicante_logs ( \"\n que += \" id INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, \"\n que += \" timestamp DATETIME, \"\n que += \" nickname VARCHAR(31), \"\n que += \" clienthost VARCHAR(255), \"\n que += \" action VARCHAR(20), \"\n que += \" channel VARCHAR(100), \"\n que += \" msg TEXT \"\n que += \" );\"\n\n # Perform the query! \n @mResource.query(que) \n end",
"def send_message(message)\n # Set the table if they're not set.\n message.table_name = table_name if message.table_name.nil? || message.table_name.empty?\n\n # Validate message before sending.\n message.validate!\n \n # If this is part of a multi message then simply append the message for\n # later sending.\n if !@multi_message.nil? && @multi_message != message\n @multi_message.messages << message\n \n # Send off the MULTI if the message count is above our limit.\n if @multi_message_max_count > 0 && @multi_message.messages.length >= @multi_message_max_count\n send_message(@multi_message)\n @multi_message = SkyDB::Message::Multi.new()\n end\n \n return nil\n \n # Otherwise send the message immediately.\n else\n begin\n # Connect to the server.\n socket = TCPSocket.new(host, port.to_i)\n \n # Encode and send message request.\n message.encode(socket)\n \n # Retrieve the respose as a buffer so we can inspect it.\n #msg, x = *socket.recvmsg\n #buffer = StringIO.new(msg)\n #puts \"[#{message.message_name}]< #{buffer.string.to_hex}\" if SkyDB.debug\n \n # Decode msgpack response. There should only be one return object.\n response = nil\n unpacker = MessagePack::Unpacker.new(socket)\n unpacker.each do |obj|\n response = obj\n break\n end\n \n # Close socket.\n socket.close()\n \n # TODO: Exception processing.\n \n # Process response back through the message.\n response = message.process_response(response)\n \n # Return response.\n return response\n\n ensure\n # Make sure we remove the multi-message if that's what we're sending.\n @multi_message = nil if @multi_message == message\n end\n end\n end",
"def create\n @message = Message.new(message_params)\n create_q(@message)\n end",
"def insert(column, value = :no_value)\n @columns << column\n @values << Translate.escape_sql(value) unless value == :no_value\n end",
"def add_produce (db, name, serving_size, calories, protein, carbs, fat)\n db.execute(\"INSERT INTO produce (name, serving_size, calories, protein, carbs, fat) VALUES (?, ?, ?, ?, ?, ?)\", [name, serving_size, calories, protein, carbs, fat])\nend",
"def insert\n col_names = self.class.columns.join(\", \")\n question_marks = ([\"?\"] * self.class.columns.length).join(\", \")\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{self.class.table_name} (#{col_names})\n VALUES\n (#{question_marks})\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def message(message) end",
"def insert(db, volume_id, page_number, access_date, ip_token)\n command = \"insert into results values (?, ?, datetime(?), ?);\"\n db.execute(command, volume_id, page_number, access_date, ip_token)\n end",
"def save_ticket(ticket)\n stmt = \"INSERT INTO #{@@tickets_table} values(#{ticket.id}, '#{ticket.requester.to_json}', '#{ticket.status}', '#{ticket.subject}', '#{ticket.content}', '#{ticket.created_at}', '#{ticket.updated_at}', '#{ticket.comment}')\"\n execute_statement(stmt)\n end",
"def create(product, price, description, quantity)\n\t@conn.exec(\"INSERT INTO amazone (product, price, description, quantity) VALUES ('#{product}', '#{price}', '#{description}', '#{quantity}');\")\n\tputs \"I added your product to the table.\"\nend",
"def add_upper_body(db, name, complete, user_id)\n\tcomplete = \"incomplete\"\n\tdb.execute(\"INSERT INTO upper_body (name, complete, user_id) VALUES (?,?,?)\", [name, complete, user_id])\n\tputs \"added upper body workout\"\nend",
"def add_media(db, mediatype, title, rentLease, mediaconsumed)\r\n\tdb.execute('INSERT INTO media (mediatype, title, rentLease, mediaconsumed) VALUES (?, ?, ?, ?)', [mediatype, title, rentLease, mediaconsumed])\r\nend",
"def add_bot_message(message)\n messages.notification.create(sender_id: User.bot_id, body: message)\n end",
"def to_insert(output, table, row)\n columns = @columns[table].map {|i| i[0] }.join(',')\n values = map_values(row, @columns[table])\n output << \"INSERT INTO #{table} (#{columns}) VALUES (#{values});\\n\"\n end",
"def add_task(db, table_name, name, time, importance)\n\tdb.execute(\"INSERT INTO #{table_name} (task_name, task_time, importance) VALUES (?, ?, ?)\", [name, time, importance])\nend",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def store( id, text )\n lib.tcidbput( @db, id, text ) || raise_error\n end",
"def create\n @message = Message.new(message_params)\n @message.user = current_user\n if @message.save\n #ActionCable.server.broadcast \"messages_#{params[:authid]}\", message: @message.content, user: current_user.name\n head :ok\n else\n render :edit\n end\n end",
"def insert_table dataset_id, new_table_gapi\n execute { service.insert_table @project, dataset_id, new_table_gapi }\n end",
"def send_sms(to_phone_number, message)\n # message = {to:, from:, body: \"Hello\\n\\nWorld\"}\n # binding.pry\n @@client.messages.create(\n from: @@from,\n to: to_phone_number,\n body: message\n )\n end",
"def add_reciever(db, name, gift, cost)\n db.execute(\"INSERT INTO personal_gift (name, gift, cost, purchased_id) VALUES (?, ?, ?, 2)\", [name, gift, cost])\nend",
"def send_message(_user_id, message, extra_data = {})\n messages.create!({sender_id: _user_id, body: message}.merge(extra_data))\n end",
"def handle_insert action, result; end"
] | [
"0.69987446",
"0.68026465",
"0.6523364",
"0.62094504",
"0.6206268",
"0.6183485",
"0.6135957",
"0.61356354",
"0.60963815",
"0.60535014",
"0.60174793",
"0.592762",
"0.5897302",
"0.589414",
"0.5870947",
"0.5870938",
"0.5868112",
"0.58478814",
"0.5839603",
"0.5834173",
"0.57848495",
"0.57592005",
"0.5753206",
"0.5712742",
"0.57074857",
"0.5683505",
"0.5675975",
"0.5673946",
"0.5673946",
"0.56707555",
"0.5669799",
"0.5638461",
"0.563655",
"0.56063855",
"0.5600539",
"0.5600539",
"0.5598164",
"0.5593844",
"0.5575813",
"0.55622375",
"0.5553833",
"0.5544804",
"0.5544804",
"0.55379754",
"0.5512311",
"0.5495072",
"0.54787016",
"0.5475225",
"0.5469868",
"0.5461787",
"0.54616207",
"0.54587805",
"0.5449585",
"0.5444327",
"0.5435911",
"0.54283226",
"0.5427783",
"0.5423418",
"0.5420106",
"0.54150873",
"0.5399046",
"0.5399046",
"0.53956133",
"0.5395513",
"0.53771734",
"0.53736943",
"0.53694135",
"0.5354209",
"0.5351416",
"0.53479713",
"0.53478456",
"0.5346584",
"0.53445566",
"0.53445566",
"0.5342939",
"0.5341663",
"0.53329134",
"0.53329134",
"0.5327749",
"0.5323172",
"0.5310867",
"0.53106666",
"0.5310286",
"0.5303562",
"0.53029984",
"0.52965885",
"0.5288909",
"0.52871656",
"0.5286705",
"0.5286062",
"0.52800435",
"0.52772534",
"0.52736014",
"0.52676094",
"0.5259902",
"0.52578807",
"0.52566314",
"0.52559185",
"0.5254648",
"0.5251345",
"0.5248964"
] | 0.0 | -1 |
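Editorial note on the `single_insert` document in the row above: it splices values straight into the SQL string, so data containing quotes breaks the statement. Below is a minimal sketch of a safer variant using a prepared statement, assuming the mysql2 gem and an already-connected `Mysql2::Client` in `@mysql_client` (names carried over from the snippet); the helper name `single_insert_prepared` is hypothetical and not part of the dataset.

require 'mysql2'

# Hypothetical helper: insert one row with a prepared statement so values are
# bound by the driver instead of being interpolated into the SQL text.
# Table and column names are still interpolated and assumed to be trusted here.
def single_insert_prepared(table_name, hash)
  columns      = hash.keys.map { |k| "`#{k}`" }.join(", ")
  placeholders = (["?"] * hash.size).join(", ")
  statement = @mysql_client.prepare(
    "INSERT INTO #{table_name} (#{columns}) VALUES (#{placeholders})"
  )
  statement.execute(*hash.values)
  true
rescue Mysql2::Error
  false
end

Usage mirrors the original snippet, e.g. single_insert_prepared("messages", sender: "host1", action: "ping").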
delete a table message or single message | def single_delete(table_name, id=nil)
status = true
begin
sql = "DELETE FROM #{table_name}"
    # leading space keeps the table name and the WHERE clause separated
    sql << " WHERE id = #{id}" unless id.nil?
@mysql_client.query(sql)
rescue
status = false
end
return status
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_message(data); end",
"def delete_message(data); end",
"def destroy\n # delete a specific message\n end",
"def destroy\n # delete a specific message\n end",
"def destroy\n # delete a specific message\n end",
"def delete_message(id)\n record \"/msg/delete/#{id}\"\n end",
"def destroy\n get_message().delete\n end",
"def delete\n @client.post('/api/del_msg', id: get_attribute(:name))\n end",
"def messagedelete\n TDirmessage.find_by(\"dirmsg_id=?\", params[:messagedelete]).delete\n redirect_back(fallback_location: dirmsgcreate_path)\n end",
"def delete_msg()\n MsgUtil.delete_msg(params[:ch])\n end",
"def destroy\n #@message = Message.find(params[:id])\n #@message.destroy\n #redirect_to projects_path\n current_user.delete_message(ActsAsMessageable::Message.find(params[:id]))\n redirect_to trash_url\n end",
"def delete_item(message_id)\n resp = @dynamoDB.delete_item({\n table_name: \"Messages\", # required\n key: { # required\n \"message_id\" => message_id, # value <Hash,Array,String,Numeric,Boolean,IO,Set,nil>\n }\n })\nend",
"def delete_message(display_id, message_id)\n delete \"commandcenter/displays/#{display_id}/messages/#{message_id}\"\n end",
"def destroy\n @message.destroy\n end",
"def destroy\n message = Message.find(params[:id])\n message.destroy\n end",
"def destroy\n @message = Messages.find(params[:id])\n @message.destroy\n end",
"def delete\n CONNECTION.execute(\"DELETE FROM '#{tablename}' WHERE id = ?;\", @id)\n \"Deleted.\"\n end",
"def delete_message(message)\n client.delete_message(\n queue_url: @job_queue.queue_url,\n receipt_handle: message.receipt_handle,\n )\n end",
"def message_delete\n messages = params[:mesg_ids]\n if messages.present?\n messages.each do |f|\n mesg = Message.find(f)\n mesg.update_columns(:is_deleted => 1 )\n flash[:success] = \"You have successfully Deleted the Message\"\n end\n else\n flash[:danger] = \"Please select checkbox to DELETE the messages\"\n end\n redirect_to :back\n end",
"def message_delete(id)\n request(:delete, \"messages/#{id.to_s}\")\n end",
"def delete_message(message)\n client.delete_message(\n queue_url: @job_queue.queue_url,\n receipt_handle: message.receipt_handle,\n )\n end",
"def destroy\n @user_message.destroy\n end",
"def destroy\n @message.destroy\n respond_to do |format|\n format.html { redirect_to administrators_messages_path(@message), notice: 'Новость удалена.' }\n end\n end",
"def delete(message_text)\n self.span(:class=>\"discussion_post_message\", :text=>message_text).parent.parent.button(:text=>\"Delete\").click\n end",
"def destroy\n @message.destroy\n \n flash[:success] = \"Your message was successfully deleted.\"\n redirect_to @message\n end",
"def destroy\n @message.destroy\n redirect_to action: :index\n end",
"def delete\n @service.delete_message(self)\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n redirect_to(:action => \"index\")\n end",
"def destroy\n \t\n @message = Message.find(params[:id])\n @message.delete\n\n respond_to do |format|\n format.html { redirect_to(messages_url) }\n format.xml { head :ok }\n end\n end",
"def delete(key)\n messages.delete(key)\n end",
"def delete_message(queue_name, message_id, pop_receipt)\r\n execute :delete, \"#{queue_name}/messages/#{message_id}\", { :pop_receipt => pop_receipt }\r\n end",
"def delete_request(req)\n req[:sqs_msg].delete\n end",
"def destroy\n @message = Message.find(params[:id])\n\t ent = @message.theme if @message.theme != nil\n\t ent = @message.photo if @message.photo != nil\n\t ent = @message.photo_album if @message.photo_album != nil\n ent = @message.video if @message.video != nil\n \tif userCanDeleteMessage?(@message)\n theme = @message.theme\n \t\tif is_admin?\n \t\t\tif @message.status_id != 1\n \t\t\t\t@message.destroy\n \t\t\telse\n \t\t\t\t@message.update_attribute(:status_id, 2)#@message.set_as_delete\n \t\t\tend \n \t\telse\n \t\t\t@message.destroy\n \t\tend\n theme.last_msg_upd if !theme.nil?\n \t\trespond_to do |format|\n \t\t format.html { redirect_to ent }\n \t\t format.json { head :no_content }\n \t\tend\n \telse\n \t\tredirect_to '/404'\n \tend\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.mark_deleted(current_user)\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.make_deleted(current_user.id)\n response_message = {:message => 'Message deleted successfully.'}\n respond_to do |format|\n format.xml { render :xml => response_message }\n format.json { render :json => response_message }\n end\n end",
"def delete(message_id)\n Mailgun.submit :delete, messages_url(message_id)\n end",
"def delete()\n sql = \"DELETE FROM tickets WHERE id=#{@id};\"\n SqlRunner.run(sql)\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n render json: 'Message was successfully deleted', status: :ok\n end",
"def destroy\n @admin_message.destroy \n respond_to do |format|\n format.html { redirect_to admin_messages_url, notice: \"#{ t 'activerecord.successful.messages.message_deleted' }\" }\n format.json { head :no_content }\n end\n end",
"def delete\n table = self.class.to_s.pluralize.underscore\n\n DATABASE.execute(\"DELETE FROM #{table} WHERE id = #{@id};\")\n end",
"def delete_table(table, options={})\n return send_message(SkyDB::Message::DeleteTable.new(table, options))\n end",
"def delete_message_template(id)\n @client.raw('delete', \"/content/message-templates/#{id}\")\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n head :no_content\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n head :no_content\n end",
"def delete_message(token, id, user = nil)\n request_url = user_context(user) << \"/Messages/\" << id\n\n delete_response = make_api_call \"DELETE\", request_url, token\n\n return nil if delete_response.nil? || delete_response.empty?\n\n parse_response(delete_response)\n end",
"def destroy\n @message = Admin::Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_messages_url) }\n format.xml { head :ok }\n end\n end",
"def delete\n \n end",
"def delete(key)\n messages.delete(key)\n end",
"def delete(key)\n messages.delete(key)\n end",
"def delete\n\n DB.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def delete_message(message_id)\n RestClient.delete(\"#{@url}/messages/#{message_id}\")\n end",
"def remove_records(messages)\n clause = deleted_query(messages)\n\n clause.delete_all\n end",
"def delete(table, ids) # abstract\n end",
"def destroy\n @user = User.find(current_user)\n @message = @user.messages.find(params[:id])\n @message.destroy\n\n end",
"def destroy\n # don't really destroy\n @topic = Topic.find(params[:topic_id])\n @forum = Forum.find(params[:forum_id])\n\n # mark as deleted with default reason\n @message.deleted = true\n @message.deletion_reason = \"Message deleted\"\n @message.deletion_by = current_user.id\n\n @message.save\n\n respond_to do |format|\n format.html { redirect_to forum_topic_path(@forum, @topic) }\n format.json { head :no_content }\n end\n end",
"def delete\n # Figure out the table's name from the object we're calling the method on.\n table_name = self.class.to_s.pluralize.underscore\n CONNECTION.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\") # need to see if this one will work, if not look up.\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url, notice: 'message.deleted' }\n format.json { head :no_content }\n end\n end",
"def delete\n sql = \"DELETE FROM tickets WHERE id = $1\"\n values = [id]\n SqlRunner.run(sql, values)\n end",
"def destroy\n destroy_q(@message, messages_url)\n end",
"def destroy\n @message.destroy\n respond_to do |format|\n format.html { redirect_to success_path, notice: _('Message deleted') }\n format.json { head :no_content }\n end\n end",
"def delete(msg)\n read_write()\n uid = (msg.kind_of?(EasyIMAP::Message) ? msg.uid : msg)\n @conn.uid_store(uid, '+FLAGS', [:Deleted])\n expunge()\n end",
"def destroy\n @message = Message.find(params[:id])\n respond_to do |format|\n if params[:attempt].eql?(@message.answer)\n @message.destroy\n @messages = Message.all\n format.html { redirect_to messages_url, notice: 'Message was successfully deleted.' }\n else\n format.html { redirect_to @message, notice: 'your answer is wrong, unable to delete the post' }\n end\n end\n end",
"def destroy\n @v1_message = V1::Message.find(params[:id])\n @v1_message.destroy\n\n head :no_content\n end",
"def delete()\n sql = \"DELETE FROM transactions\n WHERE id = $1\"\n values = [@id]\n SqlRunner.run(sql, values)\nend",
"def delete_message(sid)\n\t@client = Twilio::REST::Client.new ACCOUNT_SID, AUTH_TOKEN\n\t@client.recordings.get(sid).delete()\nend",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :no_content }\n end\n end",
"def del_message # for user it is delte but in db it change status to false\t\n\t\tmid = Base64.decode64(params[:id])\t\t\t \n\t\ttype = params[:type]\t\t\t \n\t\tmessage = Message.find(mid)\t\n\t\tif params[:type]=='self' \n\t\t\tmessage.update_attributes(status: false)\n\t\telse\n\t\t\tmessage.update_attributes(recipient_status: false)\n\t\tend\n\t\t\t\n\t if message \n\t\t render :json => mid and return \n\t else\n\t\t render :json => {errors: \"Please Try Again!\"} and return\n\t end\t\n\tend",
"def message_destroy(id)\n post(\"/direct_messages/destroy/#{id}.json\")\n end",
"def message_destroy(id)\n post(\"/direct_messages/destroy/#{id}.json\")\n end",
"def destroy\n \tif @message = Message.related(current_user.id).find(params[:id])\n\n if @message.destroy\n flash[:notice] = \"Message deleted.\"\n else\n flash[:error] = \"Message could not deleted.\"\n end\n\n else\n flash[:error] = \"Message could not be deleted. You are not the owner of the message.\"\n end\n redirect_to messages_path\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n respond_to do |format|\n format.html { redirect_to noticias_messages_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @send_message = SendMessage.find(params[:id])\n @send_message.destroy\n\n respond_to do |format|\n format.html { redirect_to(send_messages_url) }\n format.xml { head :ok }\n end\n end",
"def delete_message_by_restaurant\n root_id = @parsed_json[\"message_id\"] if @parsed_json[\"message_id\"]\n check = Notifications.where(\"id=?\", root_id.to_i).first\n if check.nil?\n render :status=>412, :json=>{:status=>:failed, :error=>\"Not exist this message\"}\n else\n sql =\"notifications.alert_type != 'Publish Menu Notification' AND notifications.id=? OR notifications.reply=?\"\n notifications = Notifications.where(sql, root_id.to_i,root_id.to_i)\n for i in notifications\n i.update_attributes(:is_show=>0, :is_show_detail=>0)\n end\n render :status=>200, :json=>{:status=>:success}\n end\n end",
"def delete\n CONNECTION.execute(\"DELETE FROM logs WHERE id = #{self.id};\")\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n redirect_to(:back) \n\n end",
"def delete_message(message)\n API::Channel.delete_message(@bot.token, @id, message.resolve_id)\n end",
"def delete\n raise \"'id' is not set.\" if @id == nil\n sql = \"DELETE FROM #{table} WHERE id=#{@id}\"\n Database.transaction(sql)\n @log.debug \"Record[#{self}] is deleted from Table[#{table}]\"\n end",
"def delete\n end",
"def destroy\n @user = User.find_by_login(params[:user_id])\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to(messages_url) }\n format.xml { head :ok }\n end\n end",
"def delete (table_name, record_id)\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{record_id}\")\n end",
"def destroy\n @message.destroy\n \n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :no_content }\n end\n end",
"def delete\n end",
"def delete\n DB.exec(\"DELETE FROM line WHERE id = #{self.id};\")\n end",
"def destroy\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url, notice: 'Message was successfully deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n authorize_admin or return\n\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to(messages_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @admin_message = AdminMessage.find(params[:id])\n @admin_message.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_messages_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @msg.destroy\n respond_to do |format|\n format.html { redirect_to msgs_url }\n format.json { head :no_content }\n end\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.xml { head :ok }\n end\n end",
"def delete(sql, name = nil) end",
"def delete_message(message_title)\n full_path = \"#{full_queue_path}/#{message_title}\"\n locker = @zk.locker(\"#{full_queue_path}/#{message_title}\")\n if locker.lock!\n begin\n @zk.delete(full_path)\n return true\n ensure\n locker.unlock!\n end\n else\n return false\n end\n end",
"def remove_message(name)\n\t\tend",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @message = Message.find(params[:id])\n @message.destroy\n\n respond_to do |format|\n format.html { redirect_to messages_url }\n format.json { head :ok }\n end\n end",
"def destroy(table)\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def destroy\n @message.destroy\n respond_to do |format|\n format.html { redirect_to messages_url, notice: 'El Mensaje ha sido eliminado correctamente!' }\n format.json { head :no_content }\n end\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend",
"def destroy\n @message.destroy\n respond_to do |format|\n format.html { redirect_to admin_messages_url, notice: 'Message was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] | [
"0.7788787",
"0.7788787",
"0.7491494",
"0.7476153",
"0.74711",
"0.7355781",
"0.7272073",
"0.7231599",
"0.70730984",
"0.69947916",
"0.69696033",
"0.69531834",
"0.6899367",
"0.6844329",
"0.6842122",
"0.6702027",
"0.6698341",
"0.6674545",
"0.66705793",
"0.66674906",
"0.66562545",
"0.6610867",
"0.65921754",
"0.65553963",
"0.6550694",
"0.6527086",
"0.65199",
"0.6490011",
"0.6487672",
"0.64797956",
"0.6475439",
"0.6469845",
"0.64478827",
"0.64458054",
"0.64299524",
"0.6429531",
"0.6419405",
"0.64093673",
"0.6396366",
"0.6391045",
"0.63900584",
"0.6385161",
"0.63825744",
"0.6381342",
"0.63806546",
"0.63762754",
"0.6372479",
"0.63656586",
"0.63656586",
"0.63617367",
"0.6359893",
"0.6359208",
"0.6350221",
"0.63466007",
"0.63448477",
"0.63166845",
"0.63110626",
"0.63055825",
"0.6296944",
"0.6273278",
"0.6268838",
"0.6251232",
"0.6248955",
"0.62463415",
"0.62444437",
"0.6219991",
"0.62193763",
"0.6215355",
"0.6215355",
"0.62093955",
"0.6208507",
"0.6204391",
"0.6203068",
"0.6199444",
"0.61975044",
"0.6194285",
"0.6169116",
"0.61689407",
"0.616848",
"0.616586",
"0.616466",
"0.6156915",
"0.6154384",
"0.6153367",
"0.61494845",
"0.6147197",
"0.6147125",
"0.61434835",
"0.6137437",
"0.6135131",
"0.6125706",
"0.6125623",
"0.61198163",
"0.61198163",
"0.61198163",
"0.61198163",
"0.61196065",
"0.61173856",
"0.61156106",
"0.6113361",
"0.611132"
] | 0.0 | -1 |
Endpoint /images exists on both the v1 and v2 API. The attribute 'visibility' is used to detect whether the call has been made on v1 or v2. With v2 we already have all the needed information, but with v1 we don't, so we have to call /images/detail to get full details | def get_all_images(env)
images_json = get(env, "#{@session.endpoints[:image]}/images")
images = JSON.parse(images_json)['images']
return images if images.empty?
is_v1 = false
unless images[0].key? 'visibility'
is_v1 = true
images_json = get(env, "#{@session.endpoints[:image]}/images/detail")
images = JSON.parse(images_json)['images']
end
images.map do |i|
i['visibility'] = i['is_public'] ? 'public' : 'private' if is_v1
Image.new(i['id'], i['name'], i['visibility'], i['size'], i['min_ram'], i['min_disk'])
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def images() \n uri = URI.parse(\"http://\" + @location.host + \":9292/v2/images\")\n return get_request(uri, @token)\n end",
"def marketplace_image_my_images_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VnfsApi.marketplace_image_my_images ...'\n end\n # resource path\n local_var_path = '/1.0.0/marketplace/image/my_images/'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'InlineResponse2009')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VnfsApi#marketplace_image_my_images\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def status_images\n response = JSON.parse(@client.get(\"/api/v1/status-images\").body)\n return response[\"images\"] || response\n end",
"def get_images\n @command = :get_images\n # set a flag indicating whether or not the user wants to see all images,\n # including the hidden ones\n show_hidden = (@prev_args.peek(0) == \"-i\" || @prev_args.peek(0) == \"--hidden\")\n # get the images from the RESTful API (as an array of objects)\n uri_str = ( show_hidden ? \"#{@uri_string}?hidden=true\" : @uri_string )\n uri = URI.parse uri_str\n result = hnl_http_get(uri)\n unless result.blank?\n # convert it to a sorted array of objects (from an array of hashes)\n sort_fieldname = 'filename'\n result = hash_array_to_obj_array(expand_response_with_uris(result), sort_fieldname)\n end\n # and print the result\n print_object_array(result, \"Images:\", :style => :table)\n end",
"def list\n @api.get(\"#{@api.path}/Images\")\n end",
"def images\n IbmCloudRest.get \"#{@uri}/images\"\n end",
"def images(params = {})\n @api.get(\"#{@api.path}/List/#{@id}/Images\", params: params)\n end",
"def images\n @images ||= Image.find_all_by_listing_id(listing_id, oauth)\n end",
"def get_images\n images = collect_inventory(:private_images) { gather_data_for_this_region(@sas, 'list_all_private_images') }\n rescue ::Azure::Armrest::ApiException => err\n _log.warn(\"Unable to collect Azure private images for: [#{@ems.name}] - [#{@ems.id}]: #{err.message}\")\n else\n process_collection(images, :vms) { |image| parse_image(image) }\n end",
"def list_public_virtual_machine_images\n request_path = '/services/images'\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.virtual_machine_images_from_xml(response)\n end",
"def show_image # :nologin: :prefetch:\n store_location\n if @image = find_or_goto_index(Image, params[:id].to_s, :include => [\n :copyright_changes,\n :image_votes,\n :license,\n {:observations => [:comments, :name]},\n :projects,\n :user,\n ])\n @is_reviewer = is_reviewer\n pass_query_params\n\n # Decide which size to display.\n @default_size = @user ? @user.image_size : :medium\n @size = params[:size].blank? ? @default_size : params[:size].to_sym\n\n # Make this size the default image size for this user.\n if @user and (@default_size != @size) and\n (params[:make_default] == '1')\n @user.image_size = @size\n @user.save_without_our_callbacks\n @default_size = @size\n end\n\n # Wait until here to create this search query to save server resources.\n # Otherwise we'd be creating a new search query for images for every single\n # show_observation request. We know we came from an observation-type page\n # because that's the only time the 'obs' param will be set (with obs id).\n obs = params[:obs]\n if !obs.blank? &&\n # The outer search on observation won't be saved for robots, so no sense\n # in bothering with any of this.\n !is_robot?\n obs_query = find_or_create_query(:Observation)\n obs_query.current = obs\n img_query = create_query(:Image, :inside_observation,\n :observation => obs, :outer => obs_query)\n set_query_params(img_query)\n end\n\n # Cast user's vote if passed in 'vote' parameter.\n if @user and\n (val = params[:vote]) and\n (val == '0' or (val = Image.validate_vote(val)))\n val = nil if val == '0'\n cur = @image.users_vote\n if cur != val\n anon = @user.votes_anonymous == :yes\n @image.change_vote(@user, val, anon)\n Transaction.put_images(:id => @image, :set_vote => val, :set_anonymous => anon)\n end\n\n # Advance to next image automatically if 'next' parameter set.\n if params[:next]\n query = find_or_create_query(Image)\n query.current = @image\n if query.index(@image) and\n (query = query.next)\n @image = query.current\n end\n end\n end\n\n # Grab list of votes.\n @votes = @image.image_votes(:include => :user).sort_by do |v|\n (v.anonymous ? :anonymous.l : v.user.unique_text_name).downcase\n end\n\n # Update view stats on image we're actually showing.\n update_view_stats(@image)\n end\n end",
"def list_images # :nologin:\n query = create_query(:Image, :all, :by => :created_at)\n show_selected_images(query)\n end",
"def images\n @images ||= ApiFactory.new 'Projects::Images'\n end",
"def marketplace_image_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VnfsApi.marketplace_image ...'\n end\n # resource path\n local_var_path = '/1.0.0/marketplace/image'\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'InlineResponse2009')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VnfsApi#marketplace_image\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def search_for_images\n request = {\n :RequestHeader => { :Token => @authenticator.token},\n :SearchForImages2RequestBody => {\n :Query => { :SearchPhrase => @search_query},\n :ResultOptions => {\n :ItemCount => @max_num_images,\n :EditorialSortOrder => 'MostPopular'\n },\n :Filter => {\n :ImageFamilies => [\"editorial\"],\n :GraphicStyles => [\"Photography\"]\n }\n }\n }\n response = post_json(request)\n if response[\"ResponseHeader\"][\"Status\"]\n response[\"SearchForImagesResult\"][\"Images\"]\n else\n raise \"No images returned #{response['ResponseHeader']['Status']}\"\n end\n end",
"def images\n do_client.images.all.select do |image|\n image.public == false && image.type.casecmp('snapshot').zero?\n end.sort_by(&:id).reverse\n rescue DropletKit::Error => e\n e\n end",
"def images\n @images ||= aws_client.images(owners: ['self'], filters: image_filters).map do |image|\n OpenStruct.new(\n with_tags(image, image_id: image.image_id,\n type: image.image_type,\n public: image.public,\n created_at: image.creation_date)\n )\n end\n end",
"def fetch_thumbnail_visibility\n response = Blacklight.default_index.connection.get 'select', params: { q: \"id:#{identifier}\" }\n visibility = response[\"response\"][\"docs\"][0][\"visibility_ssi\"]\n return visibility if visibility.present?\n [\"restricted\"]\n rescue\n [\"restricted\"]\n end",
"def images()\n\t\treturn Images.new(@credentials.client_key, @credentials.get_access_token)\n\tend",
"def images\n if @group.is_member?(@user)\n @images = @group.uploads.images.paginate(:page => @page, :per_page => @per_page, :order => 'created_at desc')\n else\n @images = @group.uploads.images.public.paginate(:page => @page, :per_page => @per_page, :order => 'created_at desc')\n end\n respond_to do |format|\n format.js { render :json => basic_uploads_json(@images) }\n end\n end",
"def images\n response = JSON.parse( self.class.get(\"#{BASE_URL}/contest/#{@api_key}/images\") )\n end",
"def show\n @image = Image.find(params[:id])\n checkaccountobject(\"images\",@image)\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show_image\n store_location\n @image = find_or_goto_index(Image, params[:id].to_s)\n return false unless @image\n\n pass_query_params\n\n # Decide which size to display.\n @default_size = @user ? @user.image_size : :medium\n @size = params[:size].blank? ? @default_size : params[:size].to_sym\n\n # Make this size the default image size for this user.\n if @user && (@default_size != @size) && (params[:make_default] == \"1\")\n @user.image_size = @size\n @user.save_without_our_callbacks\n @default_size = @size\n end\n\n # Wait until here to create this search query to save server resources.\n # Otherwise we'd be creating a new search query for images for every single\n # show_observation request. We know we came from an observation-type page\n # because that's the only time the \"obs\" param will be set (with obs id).\n obs = params[:obs]\n if obs.present? && obs.to_s.match(/^\\d+$/) &&\n # The outer search on observation won't be saved for robots, so no sense\n # in bothering with any of this.\n !browser.bot?\n obs_query = find_or_create_query(:Observation)\n obs_query.current = obs\n img_query = create_query(:Image, :inside_observation,\n observation: obs, outer: obs_query)\n query_params_set(img_query)\n end\n\n # Cast user's vote if passed in \"vote\" parameter.\n if @user &&\n (val = params[:vote]) &&\n (val == \"0\" || (val = Image.validate_vote(val)))\n val = nil if val == \"0\"\n cur = @image.users_vote\n if cur != val\n anon = @user.votes_anonymous == :yes\n @image.change_vote(@user, val, anon: anon)\n end\n\n # Advance to next image automatically if \"next\" parameter set.\n if params[:next]\n query = find_or_create_query(Image)\n query.current = @image\n @image = query.current if query.index(@image) && (query = query.next)\n end\n end\n\n # Grab list of votes.\n @votes = @image.image_votes.sort_by do |v|\n (v.anonymous ? :anonymous.l : v.user.unique_text_name).downcase\n rescue StandardError\n \"?\"\n end\n\n # Update view stats on image we're actually showing.\n update_view_stats(@image)\n end",
"def get_image(image_id)\n request(\n :expects => 200,\n :method => 'GET',\n :parser => Fog::Parsers::Compute::Slicehost::GetImage.new,\n :path => \"images/#{image_id}.xml\"\n )\n end",
"def images\n images = []\n JSON.parse(resource['/offerings/image'].get)[\"images\"].each do |img|\n images << Image.new(img)\n end\n return images\n end",
"def show\n @image = @user.images.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def show\n @image = @account.images.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def evaluate_visibility\n case visibility\n when \"open\", \"low_res\"\n send_image\n when \"authenticated\", \"emory_low\" # authenticated is also called \"Emory High Download\"\n return head :forbidden unless valid_cookie?\n send_image\n when \"restricted\"\n head :forbidden\n when \"rose_high\"\n return head :forbidden unless user_ip_rose_reading_room?\n send_image\n else\n head :forbidden\n end\n end",
"def images\n @picturesandmeta = Pictureandmeta.all\n @kind = Kind.find(params[:kind_id])\n Rails.logger.info(\"Kind: #{@kind.inspect}\")\n end",
"def show\n if @image then \n if stale?(:last_modified => @object.updated_at.utc, :etag => @object)\n respond_to do |format|\n format.html { render :action => :edit unless File.exist?(view_path) }\n format.xml { render :xml => @object.to_xml }\n format.any { send(\"show_#{params[:format]}\") } if respond_to?(\"show_#{params[:format]}\") \n end\n end\n else\n show_jpg\n end\n end",
"def images()\n @photos = all_photos() \n @headers['Content-Type'] = CONTENT_TYPE\n end",
"def index\n @images = Image.all\n respond_with @images\n end",
"def list_virtual_machine_images(imageType=:all)\n images = Array.new\n if imageType == :public or imageType == :all\n public_images = list_public_virtual_machine_images\n images.concat public_images\n end\n\n if imageType == :private or imageType == :all\n private_images = list_private_virtual_machine_images\n images.concat private_images\n end\n\n images\n end",
"def list_image\n attrcheck = { 'compartment' => @options[:compartment] }\n @validate.validate(@options, attrcheck)\n opts = {}\n opts[:availability_domain] = @options[:availability_domain] if @options[:availability_domain]\n opts[:display_name] = @options[:display_name] if @options[:display_name]\n BmcAuthenticate.new(@options)\n request = OracleBMC::Core::ComputeClient.new\n request = request.list_images(@options[:compartment], opts)\n request.data\n end",
"def images\n Vultr::Resource::OS.new(@faraday)\n end",
"def get_images\n {}\n end",
"def marketplace_image_imageid_with_http_info(imageid, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: VnfsApi.marketplace_image_imageid ...'\n end\n # verify the required parameter 'imageid' is set\n if @api_client.config.client_side_validation && imageid.nil?\n fail ArgumentError, \"Missing the required parameter 'imageid' when calling VnfsApi.marketplace_image_imageid\"\n end\n # resource path\n local_var_path = '/1.0.0/marketplace/image/{imageid}/'.sub('{' + 'imageid' + '}', imageid.to_s)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['oAuth2']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Image')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: VnfsApi#marketplace_image_imageid\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def print_images_list project:\n client = ::Google::Cloud::Compute::V1::Images::Rest::Client.new\n\n # Make the request to list all non-deprecated images in a project.\n request = {\n project: project,\n # max_results indicates the maximum number of items that will be returned per page.\n max_results: 100,\n # Listing only non-deprecated images to reduce the size of the reply.\n filter: \"deprecated.state != DEPRECATED\"\n }\n\n # Although the `max_results` parameter is specified in the request, the iterable returned\n # by the `list` method hides the pagination mechanic. The library makes multiple\n # requests to the API for you, so you can simply iterate over all the images.\n client.list(request).each do |image|\n puts \" - #{image.name}\"\n end\nend",
"def get_presentation_images_with_http_info(name, password = nil, folder = nil, storage = nil)\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SlidesApi.get_presentation_images ...'\n end\n\n # verify the required parameter 'name' is set\n if @api_client.config.client_side_validation && name.nil?\n fail ArgumentError, \"Missing the required parameter 'name' when calling SlidesApi.get_presentation_images\"\n end\n # resource path\n local_var_path = '/slides/{name}/images'\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'name', name)\n\n # query parameters\n query_params = {}\n query_params[:'folder'] = @api_client.prepare_for_query(folder) unless folder.nil?\n query_params[:'storage'] = @api_client.prepare_for_query(storage) unless storage.nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n header_params[:'password'] = password unless password.nil?\n\n # http body (model)\n post_body = nil\n\n # form parameters\n post_files = []\n\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :body => post_body,\n :files => post_files,\n :auth_names => auth_names,\n :return_type => 'Images')\n return data, status_code, headers\n end",
"def describe_images()\n begin\n response = @ec2_connection.describe_images\n rescue Exception => e\n error = CloudClient::Error.new(e.message)\n return error\n end\n \n return response\n end",
"def index\n @images = @owner.images\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end",
"def list_images\n if params[:page].to_s.to_i > 1000\n render(\n status: :too_many_requests,\n content_type: \"text/plain\",\n plain: \"Your queries are killing our server. \" \\\n \"There are much better ways to scrape the images \" \\\n \"from our site. \" \\\n \"Please contact the webmaster.\" \\\n \"And please stop hammering our server!\"\n )\n return\n end\n\n query = create_query(:Image, :all, by: :created_at)\n show_selected_images(query)\n end",
"def show_images\r\n images = Document.new(self).get_images\r\n puts \"There are #{images.length} images\"\r\n index = 1\r\n images.each do |l|\r\n puts \"image: name: #{l.name}\"\r\n puts \" id: #{l.id}\"\r\n puts \" src: #{l.src}\"\r\n puts \" index: #{index}\"\r\n index += 1\r\n end\r\n end",
"def rover_images(roverName, **parameters)\n uri = URI(\"#{@rover_url}#{roverName}/photos.html\")\n parameters[:api_key] = @api_key\n uri.query = URI.encode_www_form(parameters)\n response = Net::HTTP.get_response(uri)\n\n return [] unless response.is_a?(Net::HTTPSuccess) # if requests fails, returns empty array\n\n JSON.parse(response.body)['photos']\n end",
"def download_image_with_http_info(name, index, format, password = nil, folder = nil, storage = nil)\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SlidesApi.download_image ...'\n end\n\n # verify the required parameter 'name' is set\n if @api_client.config.client_side_validation && name.nil?\n fail ArgumentError, \"Missing the required parameter 'name' when calling SlidesApi.download_image\"\n end\n # verify the required parameter 'index' is set\n if @api_client.config.client_side_validation && index.nil?\n fail ArgumentError, \"Missing the required parameter 'index' when calling SlidesApi.download_image\"\n end\n # verify the required parameter 'format' is set\n if @api_client.config.client_side_validation && format.nil?\n fail ArgumentError, \"Missing the required parameter 'format' when calling SlidesApi.download_image\"\n end\n # verify enum value\n if @api_client.config.client_side_validation && !['Jpeg', 'Png', 'Gif', 'Bmp', 'Tiff'].any?{ |s| s.casecmp(format)==0 }\n fail ArgumentError, \"Invalid value for parameter format: \" + format + \". Must be one of Jpeg, Png, Gif, Bmp, Tiff\"\n end\n # resource path\n local_var_path = '/slides/{name}/images/{index}/{format}'\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'name', name)\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'index', index)\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'format', format)\n\n # query parameters\n query_params = {}\n query_params[:'folder'] = @api_client.prepare_for_query(folder) unless folder.nil?\n query_params[:'storage'] = @api_client.prepare_for_query(storage) unless storage.nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['multipart/form-data'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n header_params[:'password'] = password unless password.nil?\n\n # http body (model)\n post_body = nil\n\n # form parameters\n post_files = []\n\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :body => post_body,\n :files => post_files,\n :auth_names => auth_names,\n :return_type => 'File')\n return data, status_code, headers\n end",
"def room_images(request)\n room_images = RoomImage.select(\n :id,\n :pos_x,\n :pos_y,\n :width,\n :height,\n :blob_id\n ).where(escape_room_id: request.escape_room_id)\n\n return GetRoomImagesResponse.new(false, 'Could not get rooms', nil) if room_images.nil?\n\n user = User.find_by_id(EscapeRoom.find_by_id(request.escape_room_id).user_id)\n data = room_images.map do |k|\n blob_url = if (k.blob_id != 0) && !ActiveStorageBlobs.find_by_id(k.blob_id).nil?\n Rails.application.routes.url_helpers.polymorphic_url(\n user.graphic.blobs.find_by_id(k.blob_id), host: ENV.fetch('BLOB_HOST', 'localhost:3000')\n )\n else\n './assets/images/room1.png'\n end\n { room_image: k,\n src: blob_url }\n end\n GetRoomImagesResponse.new(true, 'Room Images Obtained', data)\n rescue StandardError\n GetRoomImagesResponse.new(false, 'Could not get room images', nil)\n end",
"def getimagesinfo\n trek = Trek.find_by_id(params[:id])\n send_data(trek.get_images_info.to_json,\n {:type => \"application/json\", :disposition => \"inline\"})\n end",
"def show\n @image_url = ImageUrl.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image_url }\n end\n end",
"def images\n end",
"def get_images\n images = get(\"cloud-instances/#{guid}/images\")[\"images\"] || []\n\n images.map do |image|\n get_image(image[\"imageID\"])\n end.compact\n end",
"def call_ig_api(url, count = 10, image_size = 'low_resolution')\n images = []\n client_id = '5607761e84f14957963372e68c0409b8'\n begin\n ig = HTTParty.get(\"#{url}?client_id=#{client_id}&count=#{count}\")\n if ig['meta'] && ig['meta']['code'] == 200 && ig['data'].size > 0\n images = ig['data'].map{ |d| d['images'][image_size]['url'] }\n end\n rescue => _e\n nil\n end\n images\n end",
"def info( options =nil )\n json = request 'images.getInfo', options, :ImageID => image_id, :ImageKey => key\n \n image = upper_hash_to_lower_hash( json['Image'] )\n image.merge!( :image_id => image[\"id\"] )\n \n OpenStruct.new( image ) \n end",
"def get_slide_images_with_http_info(name, slide_index, password = nil, folder = nil, storage = nil)\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SlidesApi.get_slide_images ...'\n end\n\n # verify the required parameter 'name' is set\n if @api_client.config.client_side_validation && name.nil?\n fail ArgumentError, \"Missing the required parameter 'name' when calling SlidesApi.get_slide_images\"\n end\n # verify the required parameter 'slide_index' is set\n if @api_client.config.client_side_validation && slide_index.nil?\n fail ArgumentError, \"Missing the required parameter 'slide_index' when calling SlidesApi.get_slide_images\"\n end\n # resource path\n local_var_path = '/slides/{name}/slides/{slideIndex}/images'\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'name', name)\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'slideIndex', slide_index)\n\n # query parameters\n query_params = {}\n query_params[:'folder'] = @api_client.prepare_for_query(folder) unless folder.nil?\n query_params[:'storage'] = @api_client.prepare_for_query(storage) unless storage.nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n header_params[:'password'] = password unless password.nil?\n\n # http body (model)\n post_body = nil\n\n # form parameters\n post_files = []\n\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :body => post_body,\n :files => post_files,\n :auth_names => auth_names,\n :return_type => 'Images')\n return data, status_code, headers\n end",
"def imagga_images_info\n self.images.non_installation.collect do |image|\n image_options = { :image_url => image.filename.to_s }\n image_options.merge! pattern: image.pattern.name if image.pattern.present?\n image_options.merge! thumb_url: image.s3_path if image.s3_path.present?\n { :url => image.filename.to_s.gsub('https','http').gsub('-dev',''), :id => image.filename.path, :filepath => image.filename.path, :metadata => self.specifications_hash.merge(image_options) }\n end.flatten\n end",
"def info(session, id)\n read_task('rvpe.image.info', session) do\n rc = call_one_xmlrpc('one.image.info', session, id)\n raise rc[1] unless rc[0]\n\n unless image_is_public?(rc[1])\n unless image_is_owned_by_session_owner?(rc[1], session)\n msg = \"You don't have permission to access the image.\"\n admin_session(session, true, msg) do; end\n end\n end\n\n doc = weave_image_size_to_xml(rc[1])\n [true, doc.to_s]\n end\n end",
"def index\n if current_user.is_admin\n @images = Image.masters.find(:all)\n else\n @images = Image.this_user(current_user).masters.find(:all)\n end\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end",
"def get_image_details(token, assetIds)\r\n\r\n request = {\r\n :RequestHeader => {\r\n :Token => token,\r\n :CoordinationId => \"MyUniqueId\"\r\n },\r\n :GetImageDetailsRequestBody => {\r\n :CountryCode => \"USA\",\r\n :ImageIds => assetIds,\r\n :Language => \"en-us\"\r\n }\r\n }\r\n\r\n response = post_json(request, \"https://connect.gettyimages.com/v1/search/GetImageDetails\")\r\n\r\n # status = response[\"ResponseHeader\"][\"Status\"]\r\n # images = response[\"GetImageDetailsResult\"][\"Images\"]\r\n end",
"def to_api\n\n results = {\n 'client_id' => id,\n 'name' => name,\n 'nation_builder' => nation_builder_crm_authentication ? nation_builder_crm_authentication.to_api : nil\n }\n\n Client::IMAGE_SIZES.each do |label, size|\n results[\"image_#{label}\"] = avatar(label)\n end\n\n return results;\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def list_images # rubocop:disable AbcSize, MethodLength\n list = images\n return list.message if list.is_a?(DropletKit::Error)\n list = list.collect do |image|\n {\n 'name' => image['name'] || image['message'],\n 'value' => image['id'].to_s\n }\n end\n filter = Util.serialize_csv(Config.query_params['filter'])\n return list.select { |i| filter.any? { |f| i['name'] =~ /#{f}/i } } if filter\n list\n end",
"def download_image_online_with_http_info(document, index, format, password = nil)\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SlidesApi.download_image_online ...'\n end\n\n # verify the required parameter 'document' is set\n if @api_client.config.client_side_validation && document.nil?\n fail ArgumentError, \"Missing the required parameter 'document' when calling SlidesApi.download_image_online\"\n end\n # verify the required parameter 'index' is set\n if @api_client.config.client_side_validation && index.nil?\n fail ArgumentError, \"Missing the required parameter 'index' when calling SlidesApi.download_image_online\"\n end\n # verify the required parameter 'format' is set\n if @api_client.config.client_side_validation && format.nil?\n fail ArgumentError, \"Missing the required parameter 'format' when calling SlidesApi.download_image_online\"\n end\n # verify enum value\n if @api_client.config.client_side_validation && !['Jpeg', 'Png', 'Gif', 'Bmp', 'Tiff'].any?{ |s| s.casecmp(format)==0 }\n fail ArgumentError, \"Invalid value for parameter format: \" + format + \". Must be one of Jpeg, Png, Gif, Bmp, Tiff\"\n end\n # resource path\n local_var_path = '/slides/images/{index}/{format}'\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'index', index)\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'format', format)\n\n # query parameters\n query_params = {}\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['multipart/form-data'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n header_params[:'password'] = password unless password.nil?\n\n # http body (model)\n post_body = nil\n\n # form parameters\n post_files = []\n if document\n post_files = post_files.push(document)\n end\n\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :body => post_body,\n :files => post_files,\n :auth_names => auth_names,\n :return_type => 'File')\n return data, status_code, headers\n end",
"def show\n @image = Image.find(params[:id])\n\t\t\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def images (size_requested = 'all')\n returned_images = []\n images = @record.fetch('ImageInfo', {})\n if images.count > 0\n images.each do |image|\n if size_requested == image['Size'] || size_requested == 'all'\n returned_images.push({size: image['Size'], src: image['Target']})\n end\n end\n end\n returned_images\n end",
"def index\n @tags = params[:tags] ? params[:tags].split(Image.tags_separator) : nil\n\n if @tags\n include_tags = @tags.reject {|t| t.slice(0, 1) == '-'}\n exclude_tags = @tags.select {|t| t.slice(0, 1) == '-'}\n exclude_tags.map! {|t| t[1..-1]}\n\n @images = Image.recent.tagged_with_all(include_tags).not_in(:tags_array => exclude_tags)\n else\n @images = Image.recent\n end\n\n @images = Kaminari.paginate_array(@images).page(params[:page]).per(200)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @images }\n format.atom\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def show\n @photos = Photo.all\n @post = Post.find(params[:id])\n x = @post.image.url\n\n #private_key = Google::Auth::ServiceAccountCredentials.make_creds(\n #scope: 'https://www.googleapis.com/auth/cloud-platform',\n #json_key_io: StringIO.new(ENV['VISION_KEYFILE_JSON'])\n #)\n\n\n vision = Google::Cloud::Vision.new(\n project: \"redoproject-163021\",\n keyfile: \"config/redoproject-e87605fb29d9.json\",\n )\n @image = vision.image(x)\n @labels = @image.labels\n #logs\n @logs = @image.logos\n #web\n p @web = @image.web.entities\n\nend",
"def show\n if params.has_key?(:page)\n @images = Kaminari.paginate_array(@image_label_set.images).page(params[:page])\n else\n @images = Kaminari.paginate_array(@image_label_set.images).page(1)\n end\n end",
"def images(credentials, opts=nil)\n image_list = []\n terremark_client = new_client(credentials)\n safely do\n vdc_id = terremark_client.default_vdc_id\n catalogItems = terremark_client.get_catalog(vdc_id).body['CatalogItems']\n catalogItems.each{ |catalog_item|\n current_item_id = catalog_item['href'].split('/').last\n current_item = terremark_client.get_catalog_item(current_item_id).body['Entity']\n if(current_item['type'] == 'application/vnd.vmware.vcloud.vAppTemplate+xml')\n image_list << convert_image(current_item, credentials.user)\n end\n } #end of catalogItems.each\n end\n image_list = filter_on( image_list, :id, opts )\n image_list = filter_on( image_list, :architecture, opts )\n image_list = filter_on( image_list, :owner_id, opts )\n image_list\n end",
"def evaluate_thumbnail_visibility\n case thumbnail_visibility\n when \"open\", \"low_res\"\n send_thumbnail\n when \"authenticated\", \"emory_low\" # authenticated is also called \"Emory High Download\"\n return head :forbidden unless valid_cookie?\n send_thumbnail\n when \"rose_high\"\n return head :forbidden unless user_ip_rose_reading_room?\n send_thumbnail\n else\n head :forbidden\n end\n end",
"def download_images_with_http_info(name, format, password = nil, folder = nil, storage = nil)\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: SlidesApi.download_images ...'\n end\n\n # verify the required parameter 'name' is set\n if @api_client.config.client_side_validation && name.nil?\n fail ArgumentError, \"Missing the required parameter 'name' when calling SlidesApi.download_images\"\n end\n # verify the required parameter 'format' is set\n if @api_client.config.client_side_validation && format.nil?\n fail ArgumentError, \"Missing the required parameter 'format' when calling SlidesApi.download_images\"\n end\n # verify enum value\n if @api_client.config.client_side_validation && !['Jpeg', 'Png', 'Gif', 'Bmp', 'Tiff'].any?{ |s| s.casecmp(format)==0 }\n fail ArgumentError, \"Invalid value for parameter format: \" + format + \". Must be one of Jpeg, Png, Gif, Bmp, Tiff\"\n end\n # resource path\n local_var_path = '/slides/{name}/images/download/{format}'\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'name', name)\n local_var_path = @api_client.replace_path_parameter(local_var_path, 'format', format)\n\n # query parameters\n query_params = {}\n query_params[:'folder'] = @api_client.prepare_for_query(folder) unless folder.nil?\n query_params[:'storage'] = @api_client.prepare_for_query(storage) unless storage.nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['multipart/form-data'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n header_params[:'password'] = password unless password.nil?\n\n # http body (model)\n post_body = nil\n\n # form parameters\n post_files = []\n\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:POST, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :body => post_body,\n :files => post_files,\n :auth_names => auth_names,\n :return_type => 'File')\n return data, status_code, headers\n end",
"def list_private_virtual_machine_images\n request_path = '/services/vmimages'\n request = ManagementHttpRequest.new(:get, request_path, nil)\n response = request.call\n Serialization.virtual_machine_vmimages_from_xml(response)\n end",
"def get_market_images\n urns = @options.market_image_urns\n\n images = if urns\n urns.collect do |urn|\n publisher, offer, sku, version = urn.split(':')\n\n ::Azure::Armrest::VirtualMachineImage.new(\n :location => @ems.provider_region,\n :publisher => publisher,\n :offer => offer,\n :sku => sku,\n :version => version,\n :id => urn\n )\n end\n else\n gather_data_for_this_region(@vmis)\n end\n\n process_collection(images, :vms) { |image| parse_market_image(image) }\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image }\n end\n end",
"def show\n @@pict_per_page = 20\n @image_gallery_group = ImageGalleryGroup.find( params[ :id ] )\n @page_count = 1 + @image_gallery_group.all_images_size / @@pict_per_page\n @page_count -= 1 if (@image_gallery_group.all_images_size % @@pict_per_page == 0 && @page_count > 1)\n pre = params[ :page ].to_i\n pre = 1 if (pre <= 0 || pre > @page_count )\n @page = pre\n @all_images, d = @image_gallery_group.get_images( (@page - 1) * @@pict_per_page, @@pict_per_page )\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image_gallery_group }\n end\n end",
"def show\n\n # Get random gif from Giphy (http://giphy.com)\n response = HTTParty.get('http://api.giphy.com/v1/gifs/search?' +\n 'q=' + @boolio.val.to_s +\n '&api_key=dc6zaTOxFJmzC' + # provided for public beta testing via (https://github.com/Giphy/GiphyAPI#public-beta-key)\n '&limit=1' + # only want 1\n '&offset=' + rand(30).to_s + # pull from the top 10 most relevant images\n '&rating=pg' + # afraid if this is any higher\n '&fmt=json' # seems to return JSON anyway, but just in case\n )\n\n if response.code == 200\n\n # conver to JSON\n json = JSON.parse response.body, object_class: OpenStruct\n\n # set image from returned results\n @image = json.data[0].images if json.data.count > 0\n end\n\n end",
"def show\n user_id, id, ref = params[:user_id], params[:id], params[:ref]\n img = nil\n if id == '0' && ref\n # do lookup by reference\n img = Image.find_user_image_by_ref(user_id, params[:ref])\n elsif id != '0'\n # do lookup by guid\n img = Image.find_user_image_by_guid(user_id, id)\n end\n return not_found if !img\n\n respond_to do |format|\n format.json { render :json => img.js_serialize, :callback => params[:callback] }\n end\n end",
"def show\n @image = Image.find(params[:id])\n\n @previous = @image.previous_image\n @next = @image.next_image\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @image }\n end\n end",
"def available_images\n @available_images ||= images.sort_by(&:created_at).each_with_object({}) do |image, mapping|\n key = image_key(image)\n mapping[key] = image.image_id if key != ':'\n end\n end",
"def images_for_gallery gallery_id, info_level = \"Full\", include_photos = \"true\"\n\t\t\t@response = api_request 'LoadPhotoSet', [gallery_id, info_level, include_photos]\n\t\t\traise ZenfolioAPI::ZenfolioAPISessionError, @response['error']['message'] if @response['result'].nil? && @response['error'].length > 0\n\n\t\t\t@response['result']['Photos'].each do |value|\n\t\t\t\taccess_descriptor = ZenfolioAPI::Model::AccessDescriptor.new(:realm_id => value['AccessDescriptor']['RealmId'], \n\t\t\t\t\t:access_type => value['AccessDescriptor']['AccessType'], :is_derived => value['AccessDescriptor']['IsDerived'], \n\t\t\t\t\t:access_mask => value['AccessDescriptor']['AccessMask'], :password_hint => value['AccessDescriptor']['PasswordHint'], \n\t\t\t\t\t:src_password_hint => value['AccessDescriptor']['SrcPasswordHint'])\n\n\t\t\t\t@photos << ZenfolioAPI::Model::Image.new(:id => value['Id'], :width => value['Width'], :height => value['Height'], :sequence => value['Sequence'], \n\t\t\t\t\t:access_descriptor => access_descriptor, :owner => value['Owner'], :title => value['Title'], :mime_type => value['MimeType'], \n\t\t\t\t\t:size => value['Size'], :gallery => value['Gallery'], :original_url => value['OriginalUrl'], :url_core => value['UrlCore'], \n\t\t\t\t\t:url_host => value['UrlHost'], :url_token => value['UrlToken'], :page_url => value['PageUrl'], :mailbox_id => value['MailboxId'], \n\t\t\t\t\t:text_cn => value['TextCn'], :flags => value['Flags'], :is_video => value['IsVideo'], :duration => value['Duration'], :caption => value['Caption'], \n\t\t\t\t\t:file_name => value['FileName'], :uploaded_on => value['UploadedOn']['Value'], :taken_on => value['TakenOn']['Value'], :keywords => value['keywords'], \n\t\t\t\t\t:categories => value['Categories'], :copyright => value['Copyright'], :rotation => value['Rotation'], :exif_tags => value['ExifTags'], :short_exif => value['ShortExif'])\n\t\t\tend\n\n\t\t\t@photos\n\t\tend",
"def images artist\n url = \"http://developer.echonest.com/api/v4/artist/images?api_key=#{ECHONEST_API_KEY}&name=#{artist}&format=json&results=#{RESULTS}&start=0&license=unknown\"\n result = parseURL url\n result[\"response\"][\"images\"]\nend",
"def index\n @images = Image.all\n\n respond_to do |format|\n format.html # index.html.slim\n format.json { render json: @images }\n end\n end",
"def image\n response[\"image\"]\n end",
"def show\n @image = Image.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.jpg # show.jpg.erb\n format.png # show.png.erb\n format.xml { render :xml => @image }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @image }\n end\n end",
"def index\n @images = Image.where(admin_ok: true)\n end",
"def image(id, nsfw = false)\n img = get url: \"images/#{id}\", nsfw: nsfw\n img['image'] if img\n end",
"def index\n @images = @user.images.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @images }\n end\n end",
"def images; end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def show\n @image = Image.find(params[:id])\n\n render json: @image\n end",
"def show\n @image = Image.find(params[:id])\n respond_with(@image, :layout => !request.xhr?)\n end",
"def public_image_uploads\n self.image_uploads.where(privacy: 'public').presence || [header_asset_for(self.class.name)]\n end",
"def show\n begin\n query = \"created_by = \\\"#{current_user.email}\\\" OR visibility = 'public'\"\n @photo = Photo.where(query).with_attached_images.find(params[:id])\n @user_current = current_user.email\n\n #/photos/1: when 1 doesn't belong to the current user and is private\n rescue StandardError => e\n redirect_to photos_path, notice: 'Sorry, you have no permission to view this photo.'\n end\n end",
"def print_images_list_by_page project:, page_size: 10\n client = ::Google::Cloud::Compute::V1::Images::Rest::Client.new\n\n # Make the request to list all non-deprecated images in a project.\n request = {\n project: project,\n # max_results indicates the maximum number of items that will be returned per page.\n max_results: page_size,\n # Listing only non-deprecated images to reduce the size of the reply.\n filter: \"deprecated.state != DEPRECATED\"\n }\n\n # Call the each_page method of the returned enumerable to have more granular control\n # of iteration over paginated results from the API. Each time you access the next\n # page, the library retrieves that page from the API.\n page_index = 0\n client.list(request).each_page do |page|\n puts \"Page index: #{page_index}\"\n page_index += 1\n page.each do |image|\n puts \" - #{image.name}\"\n end\n end\nend",
"def index\n @images = getmydata(\"Image\")\n pagination\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @images }\n end\n end",
"def show\n @@pict_per_page = 20\n @image_gallery_group = ImageGalleryGroup.find(params[:id])\n @image_gallery_group = ImageGalleryGroup.find( params[ :id ] )\n @page_count = 1 + @image_gallery_group.all_images_size / @@pict_per_page\n @page_count -= 1 if (@image_gallery_group.all_images_size % @@pict_per_page == 0 && @page_count > 1)\n pre = params[ :page ].to_i\n pre = 1 if (pre <= 0 || pre > @page_count )\n @page = pre\n @all_images, d = @image_gallery_group.get_images( (@page - 1) * @@pict_per_page, @@pict_per_page )\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @image_gallery_group }\n end\n end"
] | [
"0.69040823",
"0.679674",
"0.6651114",
"0.66303474",
"0.66300285",
"0.66095346",
"0.6472469",
"0.63388175",
"0.6218389",
"0.61983496",
"0.6182718",
"0.6181039",
"0.6158568",
"0.61570495",
"0.6153924",
"0.61496735",
"0.61372",
"0.6112597",
"0.61104816",
"0.6085985",
"0.60652006",
"0.6061754",
"0.6042896",
"0.6024886",
"0.59958524",
"0.5992455",
"0.5988467",
"0.59595793",
"0.59539354",
"0.59440327",
"0.59431374",
"0.5941431",
"0.5939851",
"0.5933349",
"0.59220475",
"0.5916479",
"0.590011",
"0.5888524",
"0.58722645",
"0.5870129",
"0.58673793",
"0.5856572",
"0.5853323",
"0.5849955",
"0.5842828",
"0.583384",
"0.58312935",
"0.58164275",
"0.5816399",
"0.5801756",
"0.5761953",
"0.57525325",
"0.57502526",
"0.5742545",
"0.57406366",
"0.5735745",
"0.57354516",
"0.57352537",
"0.5727938",
"0.5727938",
"0.5727938",
"0.5727938",
"0.5727254",
"0.5725707",
"0.5724069",
"0.57234925",
"0.5722966",
"0.57179105",
"0.57161915",
"0.57054394",
"0.570169",
"0.5701326",
"0.5700313",
"0.56963867",
"0.5694361",
"0.56898606",
"0.56865025",
"0.568249",
"0.56810844",
"0.56756973",
"0.56700957",
"0.5658373",
"0.56549597",
"0.56538665",
"0.5652735",
"0.5646865",
"0.5643501",
"0.56422895",
"0.5637695",
"0.56370795",
"0.5631435",
"0.56307745",
"0.56287134",
"0.56287134",
"0.56251454",
"0.5624629",
"0.5620581",
"0.56186706",
"0.56161094",
"0.56105375"
] | 0.724104 | 0 |
this makes puppet serialize it as an array for backwards compatibility | def to_zaml(z)
to_a.to_zaml(z)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def serialize_array\n `self.serializeArray()`.map { |e| Hash.new(e) }\n end",
"def to_a; Array(force) end",
"def to_ary() end",
"def to_ary\n end",
"def to_a\n to_array(@version)\n end",
"def to_a\n to_array(@version)\n end",
"def to_a; [Array]; end",
"def to_ary; self.to_a; end",
"def to_ary; []; end",
"def to_ary\n to_a\n end",
"def to_ary\n to_a\n end",
"def array()\n\t\t@array\n\tend",
"def to_ary\n self.to_a\n end",
"def to_ary\n\t []\n\tend",
"def to_a\n array\n end",
"def to_a()\n end",
"def to_ary; nil; end",
"def to_s\n to_ary\n end",
"def to_ary # :nodoc:\n attributes.to_a\n end",
"def to_ary\n\t\t@data.flatten\n\tend",
"def converted_arrays; end",
"def to_ary\n [ self ]\n end",
"def to_ary\n self.to_a\n end",
"def to_ary\n body.to_ary\n end",
"def to_a #:nodoc:\n data.to_a\n end",
"def array\n raise \"Not implemented\"\n end",
"def to_ary\n\t\t\treturn @elements.to_ary\n\t\tend",
"def serialize; end",
"def serialize; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_a; end",
"def to_array_form(external_ref_path)\n # TODO: use regexp disjunction\n external_ref_path.gsub(/^node\\[/, '').gsub(/^service\\[/, '').gsub(/\\]$/, '').split('][')\n end",
"def to_ary\n self.map{|result| result}\n end",
"def to_ary\n nil\n end",
"def _to_a\n @_to_a = self::VERSION.split('.')\n end",
"def to_a; [self] end",
"def to_a; [self] end",
"def __array__; self; end",
"def to_a\n [ @content_type, @extensions, @encoding, @system, @obsolete, @docs,\n @url, registered? ]\n end",
"def to_a\n super.map{|v| Array === v ? v.to_a : v}\n end",
"def array\n @array\n end",
"def to_a\n Array.wrap(self)\n end",
"def array\n @@array\n end",
"def to_ary()\n return nil;\n end",
"def to_a; []; end",
"def to_a\n end",
"def to_a\n end",
"def to_a\n end",
"def to_a\n end",
"def to_a\n attributes.to_a\n end",
"def to_array(name)\n if name.is_a? Array\n name\n elsif name.is_a? Hash\n if name.keys.size == 1\n [name.values].flatten\n else\n STDERR.puts \"invalid group configuration: #{name}\"\n exit(-1)\n end\n else\n [name]\n end\nend",
"def serialize\n end",
"def to_a() end",
"def to_a() end",
"def to_a() end",
"def to_a() end",
"def to_a() end",
"def to_a() end",
"def contact_methods_as_array\n JSON.parse(self.contact_methods)\n end",
"def to_a; [self]; end",
"def to_ary \n\t\t\treturn @elem.to_ary\n\t\tend",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def to_json\n to_a.to_json\n end",
"def serialize(object)\n object ? constraint.convert(object) : []\n end",
"def to_flex_array\n self\n end",
"def encode_to_array\n components = []\n components << [self.class.id, @channel, @payload.bytesize].pack(PACK_CHAR_UINT16_UINT32)\n components << self.class.encoded_payload(@payload)\n components << FINAL_OCTET\n components\n end",
"def json_serialize\n end",
"def to_a\n end",
"def to_a\n end",
"def to_a\n end",
"def to_a\n @arr\n end",
"def array(value)\n value.respond_to?(:to_ary) ? value : [value]\n end",
"def to_ary\n [\"#{id}: #{description}\", @raw_data]\n end",
"def to_a \n return @data\n end",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def to_ary\n nil\n end",
"def record_to_array(r, attrs)\n []\n end",
"def to_ary\n\t self.values\n\t end",
"def get_items\r\n @arr.to_a\r\n end",
"def to_a\n [ self ]\n end",
"def to_ary\n each.to_a\n end",
"def to_a\n each.force\n end",
"def arrays_to_hashes\n end",
"def to_array(value)\n value.class == Array ? value : [value]\n end",
"def to_a\n @size_dep.depend\n array = []\n Volt.run_in_mode(:no_model_promises) do\n attributes.size.times do |index|\n array << deep_unwrap(self[index])\n end\n end\n array\n end",
"def serialize_attrs\n @serialize_attrs ||= []\n end",
"def to_a\n @data\n end",
"def test_array\n x = [1, \"two\", 3.0]\n assert_equal x, Marshal.load(Marshal.dump(x))\n end",
"def serialize\n \n end",
"def _array(obj)\n obj.map { |v| _renc(v) }\n end"
] | [
"0.71346045",
"0.708722",
"0.7059569",
"0.69795847",
"0.6864735",
"0.6864735",
"0.6754864",
"0.67372924",
"0.67151636",
"0.6714871",
"0.6714871",
"0.6677164",
"0.6673366",
"0.66657084",
"0.66579926",
"0.6644083",
"0.6621313",
"0.6604245",
"0.6591526",
"0.65850824",
"0.65720284",
"0.6546956",
"0.65278524",
"0.6510295",
"0.64925355",
"0.6473254",
"0.6470345",
"0.6420683",
"0.6420683",
"0.64146507",
"0.64146507",
"0.64146507",
"0.64146507",
"0.64146507",
"0.64146507",
"0.64146507",
"0.64146507",
"0.6413956",
"0.6412518",
"0.6408675",
"0.6396318",
"0.6390302",
"0.6390302",
"0.6380873",
"0.6377024",
"0.6376674",
"0.63731796",
"0.6348063",
"0.634279",
"0.63299114",
"0.6323453",
"0.6323275",
"0.6323275",
"0.6323275",
"0.6323275",
"0.6320222",
"0.631265",
"0.63123995",
"0.6304643",
"0.6304643",
"0.6304643",
"0.6304643",
"0.63040465",
"0.63040465",
"0.63007635",
"0.62981194",
"0.6287713",
"0.6275936",
"0.6275936",
"0.6275936",
"0.6275936",
"0.6273092",
"0.62719154",
"0.62657213",
"0.6260896",
"0.6255101",
"0.6253216",
"0.6253216",
"0.6253216",
"0.62412536",
"0.62340754",
"0.62301797",
"0.62101305",
"0.620926",
"0.620926",
"0.620926",
"0.620926",
"0.62035996",
"0.61975724",
"0.6190534",
"0.61706674",
"0.61543995",
"0.6147996",
"0.6140616",
"0.6130613",
"0.61182374",
"0.6116563",
"0.6114153",
"0.6091703",
"0.6090699",
"0.6085665"
] | 0.0 | -1 |
The list of record types that are subject to rolebased authorization. | def authorizable_classes
[].tap do |result|
ObjectSpace.each_object(Class) do |c|
next unless c.superclass == ApplicationRecord
result << c if c.ancestors.include?(Record::Authorizable)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new_types\n return [] unless allow_new_records?\n\n # build our list\n Record.record_types - %w{ SOA NS } - self.permissions['protected_types']\n end",
"def permissions\n Roles.type_map[role_type].permissions\n end",
"def type\n TYPES[roletype_id]\n end",
"def roles\n # we can't use abilities as those are not defined when creating a new operator that is not yet saved\n #result = abilities.collect(&:name)\n authorizations.collect{|auth| auth.ability.try(:name)}\n end",
"def roles\n # we can't use abilities as those are not defined when creating a new operator that is not yet saved\n #result = abilities.collect(&:name)\n authorizations.collect{|auth| auth.ability.try(:name)}\n end",
"def get_restriction_types\n get_restrictions_data['types']\n end",
"def auth_types\n @auth_types ||= hardcoded_auth_types #HTTParty.get(\"#{ENV['PRIVILEGES_BASE_URL']}/patrons.json?sublibrary_code=#{ENV['PRIVILEGES_SUBLIBRARY_CODE']}\").parsed_response\n end",
"def roles\n @contents.select { |c| c.is_a?( Symbol ) }\n end",
"def selectable_access_types\n [['Unrestricted Access', 'open'], ['Controlled Access', 'restricted'], ['Other', 'closed']]\n end",
"def allowed_types\n\treturn [User]\nend",
"def get_record_types\n get_records_with_filter {|records, record_type, offset, ptr| records.push(ptr[0])}\n end",
"def role_list \n @role_list ||= begin\n (store.display_roles | group_store.display_roles)\n end\n end",
"def display_roles\n return [] if ds_field_value?\n [ds_field_value.to_sym]\n end",
"def roles_list(role = nil)\n self.role_symbols\n end",
"def policy_role_policies\n @policy_role_policies ||= Array.new.tap do |uris|\n filters = current_ability.agents.map do |agent|\n \"#{Ddr::Index::Fields::POLICY_ROLE}:\\\"#{agent}\\\"\"\n end.join(\" OR \")\n query = \"#{Ddr::Index::Fields::ACTIVE_FEDORA_MODEL}:Collection AND (#{filters})\"\n results = ActiveFedora::SolrService.query(query, rows: Collection.count, fl: Ddr::Index::Fields::INTERNAL_URI)\n results.each_with_object(uris) { |r, memo| memo << r[Ddr::Index::Fields::INTERNAL_URI] }\n end\n end",
"def roles\n Jak.role_klass.where(id: grants.pluck(:role_ids).compact.flatten)\n end",
"def types\n get_metadata unless @types\n return @types\n end",
"def calculate_authorization_response_types\n []\n end",
"def the_roles\n roles.each.map{|_r| User::ROLES[_r.to_sym] }\n end",
"def allowed_types\n\t\treturn [EmailAddress, Device, Domain, Organization, User, Location, Service, NetApplication, WebApplication]\n\tend",
"def access_types\n @access_types ||= [ALL_ACCESS].tap do |types|\n types << PUBLIC_ACCESS if public?\n types << PROTECTED_ACCESS if protected?\n types << PRIVATE_ACCESS if private?\n types << DIGITIZED_ACCESS if digitized? && !private?\n end\n end",
"def preferred_types(supported_types)\n @preferred_auth_policies.select{|uri| supported_types.member? uri}\n end",
"def types\n @types ||= []\n end",
"def types\n FIELD_TYPES\n end",
"def role\n permission_type\n end",
"def resources\n Typus::Configuration.roles[self.roles].compact\n end",
"def roles\n User.valid_roles\n end",
"def roles\n roles_as_editor + roles_as_reviewer + role_as_admin\n end",
"def roles\n return [] if !subject.respond_to?(roles_list_meth) || roles_of(subject).blank?\n roles_of(subject).flatten\n end",
"def uses_record_type?\n list_class.attribute_names.include?('record_type')\n end",
"def roles\n self.class.roles.map do |name|\n __send__(name)\n end\n end",
"def data_roles\n if current_user.has_role? :superadmin\n @roles = Role.only_super_admin.accessible_by(current_ability)\n else\n unless current_user.warehouse.blank?\n roles = \"customer\"\n else\n roles = \"supplier\"\n end\n @roles = Role.only_admin_group(roles)\n end\n end",
"def u_types\n u_types = ['Manager', 'QA', 'Developer']\n end",
"def attribute_types\n\t\treturn self.attr_oids.collect {|oid| self.schema.attribute_types[oid] }\n\tend",
"def types\n @data.keys & TYPES\n end",
"def field_types\r\n return @field_types\r\n end",
"def allowed_types\n [User]\nend",
"def permission_resources\n %w{roles sites employees classrooms students gapps_org_units}\n end",
"def facility_admin_roles\n ['facility_management', 'setter', 'head_setter', 'marketing', 'guest']\n end",
"def statement_account_types\n @statement_account_types ||= []\n end",
"def role_type\n return User.role_type_from_string(self.role)\n end",
"def course_types\n @course_types ||= Course.where(department_id: self.department_id)\n .where(course_num: self.course_num)\n .where(term_id: self.search.term_id)\n .select(:type)\n .map(&:type)\n .uniq\n end",
"def role_enum\n\t\t[:user,:vip,:admin]\n\tend",
"def role_permissions\n return @role_permissions\n end",
"def display_roles\n return [] if !ds_field_value?\n ds_field_value.split(',').map{|r| r.strip }.map(&:to_sym)\n end",
"def get_roles\n result = {}\n roles.each do |role|\n resource = if role.resource_type == 'Conference'\n Conference.find(role.resource_id).short_title\n elsif role.resource_type == 'Track'\n Track.find(role.resource_id).name\n end\n if result[role.name].nil?\n result[role.name] = [resource]\n else\n result[role.name] << resource\n end\n end\n result\n end",
"def available_types\n gather do |c|\n c.respond_to?(:model_types) ? c.model_types : []\n end\n end",
"def auth_types_collection\n @auth_types_h ||= Rails.cache.fetch \"auth_types_h\", :expires_in => 24.hours do\n # Uses the Hash object to cast a mapped array as a hash\n Hash[auth_types.map {|x| [x[\"code\"], x[\"web_text\"]]}]\n end\n end",
"def resources\n Typus::Configuration.roles.keys.map do |key|\n Typus::Configuration.roles[key].keys\n end.flatten.sort.uniq.delete_if { |x| models.include?(x) }\n end",
"def list_review_roles\n\n @review_roles = Role.get_review_roles\n\n end",
"def index\n @legal_record_types = LegalRecordType.all\n end",
"def capable_auth_types; end",
"def allowed_types\n [ Entities::Account,\n Entities::DnsRecord, \n Entities::DnsServer, \n Entities::DocFile,\n Entities::EmailAddress,\n Entities::FacebookAccount,\n Entities::Finding,\n Entities::Host, \n Entities::LocalImage,\n Entities::RemoteImage,\n Entities::KloutAccount,\n Entities::NetBlock,\n Entities::NetSvc,\n Entities::Organization,\n Entities::ParsableFile,\n Entities::ParsableText,\n Entities::PdfFile,\n Entities::Person,\n Entities::PhysicalLocation, \n Entities::SearchString, \n Entities::TwitterAccount,\n Entities::Username,\n Entities::WebApplication,\n Entities::WebForm,\n Entities::WebPage,\n Entities::XlsFile ]\n end",
"def auth_types\n @auth_types ||= [\"CB\"]\n end",
"def auth_types_array\n @auth_types_array ||= Rails.cache.fetch \"auth_types_array\", :expires_in => 24.hours do\n auth_types.collect {|x| x[\"code\"] }\n end\n end",
"def resource_type_filter\n return filter_for(:resource_type_id, \n objects_to_names_and_ids(current_user.company.resource_types),\n session[:resource_filters], _(\"Resource Type\"))\n end",
"def get_resource_types\n Occi::Log.debug(\"Getting resource types ...\")\n collection = @model.get Occi::Core::Resource.kind\n collection.kinds.collect { |kind| kind.term }\n end",
"def content_type_denylist\n end",
"def get_activites_supportedtypes()\n @restv9.get_activites_supportedobjecttypes()\n end",
"def available_types\n # TODO pull this from DB or config\n [\n :kiosk,\n :ride,\n :store,\n :restaurant\n ]\n end",
"def roles\n @resource_roles\n end",
"def roles\n users.map { |item| item['roles'] }.flatten.uniq\n end",
"def allowed_types\n\treturn [Organization]\nend",
"def get_privilege_types\n response = nexus.get(nexus_url(\"service/local/privilege_types\"))\n case response.status\n when 200\n return response.content\n else\n raise UnexpectedStatusCodeException.new(response.status)\n end\n end",
"def roles\n response[\"roles\"]\n end",
"def roles\n self.dig_for_array(\"roles\")\n end",
"def accessible_roles\n index = ROLES.index(self.role)\n roles = User::ROLES[0..index].map{|x| [x.humanize, x]} if index\n return roles\n end",
"def resource_role_scopes\n return @resource_role_scopes\n end",
"def possible_roles\n ([self.role] + DEFAULT_ROLES).uniq.compact.collect { |r| [ r.titleize, r ]}\n end",
"def admin_types_array\n @admin_types_array = AdminSessionSetting.get_bits_set_for_admin_types(admin_types)\n end",
"def user_type\n user_types = []\n if admin\n user_types << \"Admin\"\n end\n if agent\n user_types << \"Agent\"\n end\n if customer\n user_types << \"Customer\"\n end\n user_types.join(\" / \")\n end",
"def account_type\n self.role.name\n end",
"def account_type\n self.role.name\n end",
"def types\n @types ||= inject({}) { |memo, schema| memo.merge(schema.types) }\n end",
"def org_types\n organizations.map(&:organization_type)\n end",
"def type_names\n\t\ttypes_names = DATABASE.execute(\"SELECT name FROM types WHERE id='#{self.type_id[0]}' OR id='#{self.type_id[1]}';\") \t\n\t\treturn types_names\n\tend",
"def roles\n # rocking this in memory because eager-loading\n roles = object.paper_roles.select { |role|\n role.user_id == scoped_user.id\n }.map(&:description)\n roles << \"My Paper\" if object.user_id == scoped_user.id\n roles\n end",
"def facility_account_types\n @facility_account_types ||= []\n end",
"def allowed_types\n [ Entities::DnsRecord, \n Entities::Host, \n Entities::Organization, \n Entities::User]\nend",
"def types\n list = Array.new\n\n if @db != nil\n is_ok = false\n\n begin\n stm = @db.prepare( 'SELECT qType FROM qryResults GROUP BY qType ORDER BY qType')\n rs = stm.execute\n\n rs.each do |row|\n list.push row['qType']\n end\n\n stm.close\n is_ok = true\n rescue ::SQLite3::Exception => e\n Maadi::post_message(:Warn, \"Repository (#{@type}:#{@instance_name}) encountered an SELECT Types error (#{e.message}).\")\n end\n end\n\n return list\n end",
"def roles\n groups.find(:all, :conditions => {:context => Group::GroupType::ROLE})\n end",
"def resource_type_filter\n return filter_for(:resource_type_id,\n objects_to_names_and_ids(current_user.company.resource_types),\n session[:resource_filters], ResourceType.model_name.human)\n end",
"def get_context_types_list\n [\n context_type_meeting_header,\n context_type_category_header,\n context_type_relay_header,\n context_type_team_ranking,\n context_type_result_row,\n context_type_relay_row,\n context_type_ranking_row\n ]\n end",
"def questionnaire_types\n questionnaire_type_list = Array.new\n self.questionnaires.each do |questionnaire|\n if !self.questionnaires.include?(questionnaire.type)\n questionnaire_type_list << questionnaire.type\n end\n end\n questionnaire_type_list\n end",
"def roles\n return proposed_snapshot.roles if proposed? \n return active_snapshot.roles if active?\n []\n end",
"def models\n available_models = Typus::Configuration.config\n models_for_this_user = []\n available_models.to_a.each do |m|\n models_for_this_user << m[0].constantize if m[1]['roles'].include? self.role\n end\n return models_for_this_user\n rescue\n []\n end",
"def roles\r\n @roles ||= user_roles.map(&:name)\r\n end",
"def content_types\n return @content_types\n end",
"def index\n @vendor_role_types = VendorRoleType.all\n end",
"def list_roles\n Character.where(actor:self).map{|c|\"#{c.name} - #{c.show.name}\"}\n end",
"def list_authorized?\n authorized_for?(:crud_type => :read)\n end",
"def list_authorized?\n authorized_for?(:crud_type => :read)\n end",
"def types\n load_schema! unless schema_loaded?\n @types\n end",
"def get_context_types_list\n [\n context_type_meeting_header,\n context_type_category_header,\n context_type_relay_header,\n context_type_team_ranking,\n context_type_team_stats,\n context_type_stats,\n context_type_result_row,\n context_type_relay_row,\n context_type_ranking_row,\n context_type_stats_details\n ]\n end",
"def authorize_records(action, model_class)\n true\n end",
"def default_roles\n if self.user_type == \"admin\"\n self.role_ids = 1\n elsif self.user_type == \"student\"\n self.role_ids = 10\n end\n end",
"def select_cmux_support_role_types(roles)\n roles.select { |_, r| ROLE_PORT.keys.map.include?(r[:roleType]) }\n end",
"def may_attribute_types( *additional_object_classes )\n\t\treturn self.object_classes( *additional_object_classes ).\n\t\t\tcollect {|oc| oc.may }.flatten.uniq\n\tend",
"def meta_abilities\n User.roles.each do |(k, v)|\n if user.has_role? k\n can \"do_#{k}\".to_sym, :all\n end\n end\n end",
"def affiliate_account_types\n @affiliate_account_types ||= []\n end"
] | [
"0.716263",
"0.64408636",
"0.6407403",
"0.6221865",
"0.6221865",
"0.619954",
"0.6161902",
"0.6125767",
"0.6112597",
"0.61106265",
"0.6098491",
"0.6019373",
"0.60175246",
"0.59589326",
"0.59574884",
"0.5944856",
"0.5934277",
"0.58913875",
"0.5860091",
"0.5839778",
"0.5825049",
"0.5816385",
"0.57878596",
"0.57683915",
"0.57537884",
"0.57313585",
"0.5695984",
"0.56779367",
"0.56755775",
"0.5656688",
"0.56546795",
"0.5641093",
"0.56358176",
"0.56318945",
"0.56230396",
"0.56193775",
"0.5618338",
"0.56134087",
"0.5591268",
"0.5587789",
"0.55618453",
"0.556143",
"0.55547106",
"0.5553888",
"0.5550389",
"0.55455554",
"0.55417764",
"0.5540823",
"0.55331856",
"0.55275095",
"0.55243486",
"0.5524126",
"0.55126977",
"0.55038315",
"0.5502487",
"0.549614",
"0.5491285",
"0.54886174",
"0.54785585",
"0.5473288",
"0.54729444",
"0.54619807",
"0.5460358",
"0.5459679",
"0.54568785",
"0.5454624",
"0.54516006",
"0.5451287",
"0.54480165",
"0.544788",
"0.54462934",
"0.54281723",
"0.54281723",
"0.54260904",
"0.5417765",
"0.54064703",
"0.5403494",
"0.5389227",
"0.5388774",
"0.53872484",
"0.537846",
"0.5377513",
"0.5372067",
"0.5370305",
"0.5369105",
"0.536596",
"0.5359955",
"0.5359248",
"0.5354209",
"0.53540814",
"0.5351806",
"0.5351806",
"0.53499913",
"0.5336929",
"0.533237",
"0.53130823",
"0.5310278",
"0.5308408",
"0.53083926",
"0.5304077"
] | 0.58287704 | 20 |
create a new method called encrypt_file | def encrypt_file(filename)
#open the file by passing it the name and ..
input = File.open(filename, 'r')
#this is a string now so
contents = input.read
encrypted_contents = encrypt_string(contents)
input.close
output = File.open(filename + '.encrypted', 'w')
output.write(encrypted_contents)
output.close
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _encrypt\n cryptor_files(@encrypting)\n end",
"def encrypt; end",
"def encrypt_file(path, password = nil)\n salt = random_bytes(@salt_len)\n iv = random_bytes(@salt_len)\n aes_key, mac_key = keys(salt, password)\n\n cipher = cipher(aes_key, iv)\n hmac = OpenSSL::HMAC.new(mac_key, OpenSSL::Digest::SHA256.new)\n new_path = path + '.enc'\n\n File.open(new_path, 'wb') do |out_file|\n out_file.syswrite salt\n out_file.syswrite iv\n hmac.update iv\n\n file_chunks(path).each do |chunk|\n encrypted = cipher.update(chunk)\n hmac.update encrypted\n out_file.syswrite encrypted\n end\n encrypted = cipher.final\n\n hmac.update encrypted\n out_file.syswrite encrypted\n out_file.syswrite hmac.digest\n end\n new_path\n rescue TypeError, ArgumentError, SystemCallError, IOError => e\n error_handler e\n end",
"def encrypt_file(filename, rotation)\n \t# 1. Create the file handle to the input file\n \t\tinput = File.open(filename, \"r\")\n \t# 2. Read the text of the input file\n \t\tinput_string = input.read\n \t# 3. Encrypt the text\n \t\tencrypted_string = encrypt(input_string, rotation)\n \t# 4. Create a name for the output file\n \t\toutput_file = filename + \".encrypted\"\n \t# 5. Create an output file handle\n \t\tout = File.open(output_file, \"w\")\n \t# 6. Write out the text\n \t\tout.write(encrypted_string)\n \t# 7. Close the file\n \t\tout.close\n \tend",
"def encryptFile(fileIn,conf)\n\nsalt_len = 8\nbuf=''\npassword = conf[:passphrase]\ncipher = 'aes-128-cbc'\nputs aktTime()+' encrypting archive...'\nSTDOUT.flush #write out immediately\nsalt= OpenSSL::Random::pseudo_bytes(salt_len)\n\nc = OpenSSL::Cipher::Cipher.new(cipher)\nc.encrypt\n#generate key + IV from given password\nc.pkcs5_keyivgen(password, salt, 1)\nFile.open(CRYPT_TMP,'wb') do |fo|\n \n fo.write(MAGIC) #write magic string \n fo.write(salt) #write 8 bytes random salt\n File.open(fileIn,'rb') do |fi|\n while fi.read(4096,buf) \n fo.write c.update(buf)\n end\n fo.write( c.final)\n end\nend\n\n#overwrite archive with crypted archive\nputs aktTime()+' archive encrypted '\nFile.rename(CRYPT_TMP,fileIn)\nend",
"def encrypt_file(filename, key)\n File.open(filename) do |file|\n data = file.read\n encrypted64 = hex_to_base64(repeat_key_xor(data, key))\n\n File.open(\"encrypted64_\" + filename, \"w\") do |out_file|\n out_file.write(encrypted64)\n end\n end\nend",
"def encrypt_to(filename)\n self.opts.merge!({output: filename})\n encrypt\n end",
"def encrypt(filename, key)\r\n\t\r\n\t_original = File.open(filename, \"r\")\r\n\t_encrypted = File.open(\"encrypted.txt\", \"w+\")\r\n\t\r\n\t# initialize the keyword as an encryption key\r\n\t_key = EncryptionKey.new(key)\r\n\t\r\n\tlinecount = 1\r\n\t\r\n\t# loop through each line, and then each character, modifying it by the current key value\r\n\t# then write the modified character to the output file\r\n\t_original.each do |line|\r\n\t\tcharcount = 1\r\n\t\tline.split(\"\").each do |originalChar|\r\n\t\t\t\r\n\t\t\ttemp = originalChar.ord\r\n\t\t\ttemp = (temp + _key.nextModVal()) % 256\t\t\t\r\n\t\t\tencryptedChar = temp.chr\r\n\t\t\t_encrypted << encryptedChar\r\n\t\t\tcharcount += 1\r\n\t\tend\r\n\t\tlinecount += 1\r\n\tend\r\n\t\r\nend",
"def encrypt(file)\n unless EncryptFileW(string_check(file).wincode)\n raise SystemCallError.new(\"EncryptFile\", FFI.errno)\n end\n self\n end",
"def unencrypted_path\n yield SafeFile.safepath_to_string(@filename)\n end",
"def decrypt_files\n not_implemented(__method__)\n end",
"def set_encrypted_file\n @encrypted_file = EncryptedFile.find(params[:id])\n end",
"def encrypted_file_params\n params.require(:encrypted_file).permit(:myfile)\n end",
"def encrypt\n self\n end",
"def encrypt_and_return_object_content(public_key_file, content)\r\n public_key = OpenSSL::PKey::RSA.new(File.read(public_key_file))\r\n Base64.encode64(public_key.public_encrypt(content))\r\nend",
"def encrypt_to_file(file_path, value, encode_base64=true)\r\n\r\n # encrypt value.\r\n enc_value = encrypt(value, encode_base64)\r\n\r\n # save file.\r\n File.write(file_path, enc_value)\r\n enc_value\r\n end",
"def initialize(file_name, stream = T.unsafe(nil), encrypter = T.unsafe(nil)); end",
"def encrypt\n self\n end",
"def rotate_encryption!\n io = Lockbox::IO.new(read)\n io.original_filename = file.filename\n previous_value = enable_processing\n begin\n self.enable_processing = false\n store!(io)\n ensure\n self.enable_processing = previous_value\n end\n end",
"def encrypt_to_file(string, secure)\n secure.open('wb') { |s| basic_encrypt(StringIO.new(string), s) }\n end",
"def run_me\r\n key_strings = create_public_private_rsa_key_pair_strings\r\n public_key_string = key_strings['public_key_string']\r\n public_key_file = 'my-public-key.pem'\r\n private_key_string = key_strings['private_key_string']\r\n private_key_file = 'my-private-key.pem'\r\n content_to_encrypt = 'Hello, World!'\r\n bucket_name = 'doc-example-bucket'\r\n object_key = 'my-file.txt'\r\n region = 'us-west-2'\r\n s3_client = Aws::S3::Client.new(region: region)\r\n\r\n unless public_private_rsa_key_pair_files_created?(\r\n public_key_string,\r\n public_key_file,\r\n private_key_string,\r\n private_key_file\r\n )\r\n puts 'Program will stop.'\r\n exit 1\r\n end\r\n\r\n encrypted_content = encrypt_and_return_object_content(\r\n public_key_file,\r\n content_to_encrypt\r\n )\r\n puts \"Encrypted representation of '#{content_to_encrypt}' is:\"\r\n puts encrypted_content\r\n\r\n if encrypted_object_uploaded?(\r\n s3_client,\r\n bucket_name,\r\n object_key,\r\n encrypted_content\r\n )\r\n puts 'Uploaded.'\r\n else\r\n puts 'Not uploaded.'\r\n end\r\nend",
"def encryptor(io, password, &b)\n Encrypt.open(io, password, mtime, &b)\n end",
"def entrar_arquivo\n end",
"def upload_file(s3client,plaintext_key,filename,bucket)\r\n begin\r\n filebody = File.new(filename)\r\n s3enc = Aws::S3::Encryption::Client.new(encryption_key: plaintext_key,\r\n client: s3client)\r\n res = s3enc.put_object(bucket: bucket,\r\n key: filename,\r\n body: filebody)\r\n rescue Aws::S3::Errors::ServiceError => e\r\n puts \"upload failed: #{e}\"\r\n end\r\nend",
"def open_file(client, path, cipher)\n if File.exist?(path)\n File.open(path, 'r') do |f|\n f.each_line do |line|\n client.puts(encrypt(line, cipher))\n end\n end\n end\n end",
"def encrypt_move(directory, key)\n\tfile_names = get_files\n\t#puts 'FILE NAMES ' + file_names.to_s\n\t\n\tenc_names = encrypt_files(file_names, key)\n\t#puts 'ENC NAMES ' + enc_names.to_s\n\t\n\tmove_files(enc_names, directory)\n\tremove_files(enc_names, directory)\nend",
"def encrypt_message plaintext\n key_pair.encrypt plaintext\n end",
"def create(file=@file)\n if self.legacy?\n return unless @password.send(:to_file, self) \n end\n super\n end",
"def get_encrypt_id_by_file_id(file_id)\n @enc_id = UploadFile.find(:first,:select=>\"encrypt_id\",:conditions=>[\"id=?\",file_id])\n return @enc_id.encrypt_id\n end",
"def encryptor(io, password, &b)\n Encrypt.open(io, &b)\n end",
"def create\n @encrypted_file = EncryptedFile.new(encrypted_file_params)\n\n respond_to do |format|\n if @encrypted_file.save\n format.html { redirect_to @encrypted_file, notice: 'Encrypted file was successfully created.' }\n format.json { render action: 'show', status: :created, location: @encrypted_file }\n else\n format.html { render action: 'new' }\n format.json { render json: @encrypted_file.errors, status: :unprocessable_entity }\n end\n end\n end",
"def output \n\tFile.open(\"#{@file_out}\", \"w+\") do |f|\n\t f.puts \"#{@encrypted_text}\"\n\tend \nend",
"def decrypt; end",
"def decrypted_path\n file_string = decrypted_file_string(@filename, @options['file_password'])\n Tempfile.create(['decrypted', '.docx'], encoding: file_string.encoding) do |file|\n file.write(file_string)\n file.close\n\n yield file.path\n end\n end",
"def encrypt string\n string\n end",
"def cipher; end",
"def encryption_server; end",
"def encrypted?\n\t\tFile.exists?(File.join(self.location, \"encrypted\"))\n\tend",
"def encrypt_files(names, key)\n\tenc_names = Array.new\n\tcounter = 0 \n\n\tputs 'Encrypting files...'\n\n\tnames.each do |name|\n\t\tenc_name = name.gsub('.xls', '.enc')\n\t\tencrypt(name, enc_name, key)\n\t\tenc_names << enc_name\n\t\tcounter += 1\n\tend\n\n\tputs 'done, encrypted ' + counter.to_s + ' files'\n\treturn enc_names\nend",
"def run_me\n bucket_name = 'doc-example-bucket'\n object_key = 'my-file.txt'\n object_content = 'This is the content of my-file.txt.'\n region = 'us-west-2'\n s3_client = Aws::S3::Client.new(region: region)\n\n if upload_file_encrypted_aes256_at_rest?(\n s3_client,\n bucket_name,\n object_key,\n object_content\n )\n puts 'File uploaded and encrypted.'\n else\n puts 'File not uploaded.'\n end\nend",
"def encript(text_to_encript)\n require 'base64'\n Base64.encode64(text_to_encript)\n\nend",
"def encryption_client; end",
"def encrypt()\n cipher_type = \"aes-128-ecb\"\n data = password;\n key = master_password;\n \n self.encrypted_password = aes_encrypt(data,key,nil,cipher_type).to_s\n end",
"def encrypt(string)\n CRYPTO.encrypt_string(string).to_64\nend",
"def test_it_can_cipher_a_file\n file = File.open(\"input.txt\", \"w\")\n file.write(\"I am in a file\")\n file.close\n assert_equal \"F xj fk x cfib\", @caesar.from_file(\"input.txt\")\nend",
"def read\n r = super\n lockbox_notify(\"decrypt_file\") { lockbox.decrypt(r) } if r\n end",
"def read\n r = super\n lockbox_notify(\"decrypt_file\") { lockbox.decrypt(r) } if r\n end",
"def encrypt(x)\n @aes_key.encrypt(x)\n end",
"def base64_encode\n Base64.encode64(file_contents)\n end",
"def encode_picture(file,output)\n enc = Base64.encode64(File.open(file,'rb'){|io| io.read})\n File.open(Rails.public_path.to_s + '/images/'+ output,'w') do |f|\n enc.gsub!(\"\\n\",'')\n f.write enc\n end\nend",
"def write_file(client, filename, cipher)\n client.puts(encrypt(ACCEPT, cipher))\n File.open(@dir + '/' + filename, 'w') do |file|\n client.each_line do |data|\n line = decrypt(data, cipher)\n if line == END_TRANS; puts 'File saved'\n else file.write(line + \"\\n\")\n end\n end\n end\n client.close\n end",
"def encrypt(content, offset)\n # Decrypting and crypting contains more or less the exact same code.\n # The difference between those two methods is that decrypting by default takes a positive offset\n # as negative in its calculations. That is why we, when calling the decrypting method, turn our offset\n # around. With that being said, we go from + to - or - to + before executing our method call.\n # As our encrypting method is supposed to return the encrypted content uppercase, \n # and decrypt returns the decrypted content lowercase, we call string's member method upcase.\n\n return decrypt(content, offset * -1).upcase\nend",
"def encrypt(data)\n cipher = OpenSSL::Cipher.new(\"AES-256-CBC\")\n cipher.encrypt\n cipher.iv = @enc_iv\n cipher.key = @enc_key\n encrypted = cipher.update(data) + cipher.final\n enc = Base64.strict_encode64(encrypted)\nend",
"def encrypt_data(data, key, iv, cipher_type)\n aes = OpenSSL::Cipher::Cipher.new(cipher_type)\n aes.encrypt\n aes.key = key\n aes.iv = iv if iv != nil\n aes.update(data) + aes.final\n end",
"def encrypt data, key, iv, cipher_type\n aes = OpenSSL::Cipher::Cipher.new cipher_type\n aes.encrypt\n aes.key = key\n aes.iv = iv if iv != nil\n aes.update(data) + aes.final \n end",
"def encrypt(data)\n crypto_key.encrypt64(data)\n end",
"def encrypt( data )\n rsa = OpenSSL::PKey::RSA.new( File.read( @public_pem ) )\n\n # encrypt with 256 bit AES with CBC\n aes = OpenSSL::Cipher::Cipher.new( 'aes-256-cbc' )\n aes.encrypt\n\n # use random key and IV\n aes.key = key = aes.random_key\n aes.iv = iv = aes.random_iv\n\n # this will hold all primitives and ciphertext\n primitives = {}\n\n primitives['ciphertext'] = aes.update( data )\n primitives['ciphertext'] << aes.final\n\n primitives['key'] = rsa.public_encrypt( key )\n primitives['iv'] = rsa.public_encrypt( iv )\n\n # serialize everything and base64 encode it\n Base64.encode64( primitives.to_yaml )\n end",
"def update (data)\n cipher = new_encryption_cipher\n encrypted_file_content = cipher.update(data.to_yaml) + cipher.final\n\n File.open(vault_path, 'wb') { |f| f.write(encrypted_file_content) }\n FileUtils.chmod(0600, vault_path)\n end",
"def encrypt(alg, password, string)\n \n begin\n case alg\n when \"3DES\" then key = EzCrypto::Key.with_password(password, $system_salt, :algorithm => 'des3')\n when \"AES\" then key = EzCrypto::Key.with_password(password, $system_salt, :algoritmh => 'aes256')\n when \"Blowfish\" then key =EzCrypto::Key.with_password(password, $system_salt, :algoritmh => 'blowfish')\n when 'Plaintext' then return string\n else key = EzCrypto::Key.with_password(password, $system_salt, :algorithm => 'aes256') \n end\n encrypted_text = key.encrypt64(string)\n rescue => e\n p e.message\n end\n return encrypted_text\n \n end",
"def encrypt(plain_password)\n self.class.encrypt(plain_password)\n end",
"def encrypt(*args, &block)\n crypt :encrypt, *args, &block\n end",
"def encrypt(data, key, iv, cipher_type)\n aes = OpenSSL::Cipher::Cipher.new(cipher_type)\n aes.encrypt\n aes.key = key\n aes.iv = iv if iv != nil\n aes.update(data) + aes.final\n end",
"def encrypt!(block) \n Bes.new(material).encrypt block\n end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def file; end",
"def encrypt(*attributes) \n \tinclude ActiveCrypto::Encrypted\n \tbefore_save :encrypt_attributes\n \tafter_save :decrypt_attributes\n options=attributes.last.is_a?(Hash) ? attributes.pop : {}\n keyholder\n if options and options[:key]\n \t\t\t\tmodule_eval <<-\"end;\"\t\t\t\t \n \t\t\t\t\tdef session_key\n \t\t\t\t\t\t(send :#{options[:key]} ).send :session_key\n \t\t\t\t\tend\t \n \t\t\t\t\t@@external_key=true\n \t\t\t\tend;\n end\n\n base64_encode = (options and options[:base64])\n module_eval <<-\"end;\"\n def self.ezcrypto_base64?\n #{base64_encode.to_s}\n end\n end;\n \n self.encrypted_attributes=attributes\n end",
"def aes_encrypt(data, key, iv, cipher_type)\n aes = OpenSSL::Cipher::Cipher.new(cipher_type)\n aes.encrypt\n aes.key = OpenSSL::PKCS5.pbkdf2_hmac_sha1(key, \"randomString\", 1024, aes.key_len)\n aes.iv = iv if iv != nil\n aes.update(data.to_s) + aes.final \n end",
"def update\n respond_to do |format|\n if @encrypted_file.update(encrypted_file_params)\n format.html { redirect_to @encrypted_file, notice: 'Encrypted file was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @encrypted_file.errors, status: :unprocessable_entity }\n end\n end\n end",
"def encrypt(value,key,context)\n cyphertext = @vault.logical.write(\"transit/encrypt/#{key}\", plaintext: Base64.encode64(value).gsub('\\n',''), context: Base64.encode64(context).gsub('\\n',''))\n return cyphertext.data[:ciphertext]\n end",
"def ca_file; end",
"def ca_file; end",
"def upload_file_encrypted_aes256_at_rest?(\n s3_client,\n bucket_name,\n object_key,\n object_content\n)\n s3_client.put_object(\n bucket: bucket_name,\n key: object_key,\n body: object_content,\n server_side_encryption: 'AES256'\n )\n return true\nrescue StandardError => e\n puts \"Error uploading object: #{e.message}\"\n return false\nend",
"def upload_key(s3client,newkeyblob,filename,bucket)\r\n keyfile_name= filename+ \".key\"\r\n newkeyblob64 = Base64.encode64(newkeyblob)\r\n s3client.put_object({\r\n body: newkeyblob64,\r\n key: keyfile_name,\r\n bucket: bucket\r\n })\r\nend",
"def encrypt(data, key=nil)\n Crypto.new(key.nil? ? config.key : key).encrypt(data)\n end",
"def decrypt(filename, key)\r\n\t\r\n\t_decrypted = File.open(\"decrypted.txt\", \"w+\")\r\n\t_encrypted = File.open(filename, \"r\")\r\n\t\r\n\t# initialize the keyword as an encryption key\r\n\t_key = EncryptionKey.new(key)\r\n\t\r\n\t\r\n\t# loop through each line, and then each character, modifying it by the current key value\r\n\t# then write the modified character to the output file\t\r\n\t_encrypted.each do |line|\r\n\t\tline.split(\"\").each do |encryptedChar|\r\n\t\t\ttemp = encryptedChar.ord\r\n\t\t\ttemp = temp - _key.nextModVal()\r\n\t\t\t\r\n\t\t\tif temp < 0\r\n\t\t\t\ttemp = temp + 256\r\n\t\t\tend\r\n\t\t\tdecryptedChar = temp.chr\r\n\t\t\t_decrypted << decryptedChar\r\n\t\tend\r\n\tend\r\n\t\r\nend",
"def run_me\n bucket_name = \"doc-example-bucket\"\n object_key = \"my-file.txt\"\n region = \"us-west-2\"\n kms_key_id = \"9041e78c-7a20-4db3-929e-828abEXAMPLE\"\n object_content = File.read(object_key)\n\n # Note that in the following call:\n # - key_wrap_schema must be kms_context for AWS KMS.\n # - To allow reading and decrypting objects that are encrypted by the\n # Amazon S3 V1 encryption client instead, use :v2_and_legacy instead of :v2.\n s3_encryption_client = Aws::S3::EncryptionV2::Client.new(\n region: region,\n kms_key_id: kms_key_id,\n key_wrap_schema: :kms_context,\n content_encryption_schema: :aes_gcm_no_padding,\n security_profile: :v2\n )\n\n if encrypted_object_uploaded?(\n s3_encryption_client,\n bucket_name,\n object_key,\n object_content\n )\n puts \"Uploaded.\"\n else\n puts \"Not uploaded.\"\n end\nend",
"def encrypt(text, iv)\n if @cipher\n cipher = OpenSSL::Cipher::Cipher.new(\"aes-#{@cipher}-cbc\")\n cipher.encrypt\n cipher.key = @secret[0..@cipher_block_size]\n if iv != \"\"\n cipher.iv = iv\n end\n cipher_text = cipher.update(text)\n cipher_text << cipher.final\n return Base64.encode64(cipher_text) #output in base64\n else\n return text\n end\n end",
"def encrypt(data)\n return nil if !@key\n Base64::encode64(@key.private_encrypt(data)).delete(\"\\n\").strip\n end",
"def encrypt_secret(path_to_public_key)\n @public_key = OpenSSL::PKey::RSA.new(File.read(path_to_public_key))\n @public_key.public_encrypt(@secret)\n end",
"def encript_ep \n\t\tsign_in_data = {\n\t\t\temail: email\n\t\t}\n\t\t\n\t sign_in_data.each do |key,attr_val|\n encrypted_data = AseEncriptDecriptJob::encryption(attr_val)\n self.send(\"#{key}=\",encrypted_data) \n\t end\n\tend",
"def encrypt(msg, key)\n Rails.logger.debug \"AesEncryptDecrypt::encrypt::BEGIN\"\n begin\n cipher = OpenSSL::Cipher.new(ALGORITHM)\n cipher.encrypt()\n cipher.padding = 1\n cipher.key = key\n crypt = cipher.update(msg) + cipher.final\n return (Base64.encode64(crypt))\n rescue Exception => exc\n Rails.logger.error (\"Error when encrypting message #{msg} is #{exc.message}\")\n raise exc\n end\n end",
"def ez_encrypt(worker = :default)\n _ez_encrypt(self, worker)\n end",
"def file_upload(file)\n file[\"//\"] = \"/\"\n file = ENV['RED5_HOME'] + \"/webapps/encrev1/#{file}\"\n request_url = \"#{@url}/file/demo\"\n request_url += \"?uid=#{@conf.uid}&sid=#{@conf.sid}\"\n $log.info \"Request filename : #{request_url}\"\n response = RestClient.put request_url, \"\"\n $log.info \"--> Got reponse : #{response}\"\n file_name = JSON.parse(response.to_str)['result']\n if file_name\n $log.info \"--> Got filename : #{file_name}\"\n request_url = \"#{@url}/file/demo/\"\n request_url += file_name\n request_url += \"?uid=#{@conf.uid}&sid=#{@conf.sid}\"\n $log.info \"Upload (#{file}) to Encre : #{request_url}\"\n response = RestClient.put request_url, File.read(file), :content_type => 'application/x-shockwave-flash'\n $log.info \"Delete #{file} ...\"\n file = File.delete(file)\n else\n file_name = nil\n end\n rescue\n file_name = nil\n $log.info \"... failed ! (check exception below)\"\n $log.info $!\n end",
"def encrypt(data)\n\t\t# The limit of the encryption scheme is 235 bytes, so if the string is longer than that we need to limit it\n\t\tif data.length > 234\n\t\t\tdata = data[0..234] + \"\\n\"\n\t\tend\n\n\t\tkey = OpenSSL::PKey::RSA.new File.read '../keys/attacker.pub'\n\t\treturn key.public_key.public_encrypt(data)\n\tend",
"def file\n end",
"def file\n end",
"def signed_or_encrypted; end",
"def validate\n encrypt\n end",
"def raw_encode()\n return Base64.encode64(File.read @file_path).delete(\"\\n\") if RUBY_VERSION < \"1.9.0\"\n Base64.strict_encode64(File.read @file_path)\n end",
"def read_encrypted_secrets=(_arg0); end",
"def read_encrypted_secrets=(_arg0); end"
] | [
"0.8163636",
"0.7333509",
"0.7324852",
"0.711056",
"0.71002173",
"0.7091229",
"0.69009787",
"0.6887336",
"0.6855868",
"0.6695875",
"0.6693692",
"0.6673215",
"0.6595401",
"0.6578915",
"0.6559419",
"0.654343",
"0.63634336",
"0.6324155",
"0.6296088",
"0.62679666",
"0.62218285",
"0.6214107",
"0.620464",
"0.6130559",
"0.61287063",
"0.6020313",
"0.5990434",
"0.5957748",
"0.59542584",
"0.58887184",
"0.58768183",
"0.58736867",
"0.5850825",
"0.58290684",
"0.5809668",
"0.5808815",
"0.5800895",
"0.57947785",
"0.5790438",
"0.5763114",
"0.57616186",
"0.5760501",
"0.57486814",
"0.57450396",
"0.57302904",
"0.57291347",
"0.57291347",
"0.57221866",
"0.57202345",
"0.57051617",
"0.57048863",
"0.5675063",
"0.5665735",
"0.56608427",
"0.5658851",
"0.5651045",
"0.562157",
"0.5616834",
"0.56163317",
"0.56152815",
"0.5615242",
"0.5605933",
"0.5603469",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.55998576",
"0.5594638",
"0.55830425",
"0.5578541",
"0.5556436",
"0.554776",
"0.554776",
"0.5547108",
"0.55428267",
"0.5530396",
"0.55200964",
"0.5515337",
"0.5514918",
"0.5506349",
"0.550408",
"0.550324",
"0.550297",
"0.54922754",
"0.54815644",
"0.54476285",
"0.54408604",
"0.54408604",
"0.543989",
"0.54356146",
"0.54205126",
"0.54192775",
"0.54192775"
] | 0.78390193 | 1 |
GET /idea_events/1 GET /idea_events/1.json | def show
@idea_event = IdeaEvent.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @idea_event }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def show\n event_id = params[:id]\n if event_id.present?\n @event = Com::Nbos::Events::Event.active_events.where(id: event_id, tenant_id: @user.tenant_id)\n if @event.present?\n render :json => @event\n else\n render :json => {messageCode: \"event.notfound\", message: \"Event Not Found\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end",
"def index\n @events = Event.find(:all)\n respond_to do |format|\n format.html\n format.json\n end\n end",
"def index\n #returns all events from eventbrite API, need to change to pull from her endpoint\n @eventList = Event.retrieve_all_events params\n render json: @eventList, status: 200\n end",
"def show\n @event = Event.find(params[:id])\n render json: @event\n end",
"def index\n @events = Event.all\n render json: @events, status: 200\n end",
"def index\n respond_to do |format|\n format.html\n format.json { render json: @events }\n end\n end",
"def index\n @event = Event.all\n render json: @event\n end",
"def new\n @event = Event.new\n\n @idea = Idea.find(params[:idea_id])\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json \n end\n end",
"def index\n @events = Event.where(adventure_id: params[:adventure_id])\n\n render json: @events\n end",
"def events\n url = 'https://api.artic.edu/api/v1/exhibitions?limit=35'\n\n res = RestClient.get(url)\n JSON.parse(res)\nend",
"def show\n @event_event = Event::Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event_event }\n end\n end",
"def index\n @events = Event.all\n respond_to do |format|\n format.html \n format.json do\n render :json => {events: @events}\n end\n end\n end",
"def show\n @myevent = Myevent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @myevent }\n end\n end",
"def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.json { render json: @events }\n end\n end",
"def index\n @events = getUpcomingEvents()\n \n @page_title = \"Events\"\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def show\n @event = Event.find(params[:id])\n @client = Client.find(@event.client_id)\n @event_type = EventType.find(@event.event_type_id)\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @events = Event.all\n render json: @events\n end",
"def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @upcoming_events = Event.upcoming\n @past_events = Event.past\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @events = Event.all\n\n render json: @events\n end",
"def show\n @current_event = CurrentEvent.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @current_event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event } \n end\n end",
"def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @event }\n end\n end",
"def show\n @event = Event.find(params[:id])\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @event }\n end\n end",
"def index\n @event = Event.find(params[:event_id])\n\n end",
"def index\n event = Event.find(params[:event_id])\n render json: event.route, status: :ok\n end",
"def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.live\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def show\n render json: @event\n end",
"def show\n render json: @event\n end",
"def show\n render json: @event\n end",
"def show\n render json: @event\n end",
"def show\n render json: @event\n end",
"def show\n redirect_to @event, status: :moved_permanently if request.path != event_path(@event)\n gon.push(\n events: event_path(format: :json),\n single_event: true,\n start_event: @event.start_date,\n end_event: @event.end_date\n )\n seo_tag_show event\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def show\n render json: EventSerializer.new(@event).as_json, status: 200\n end",
"def show\n render json: @event, status: :ok\n end",
"def index\n @events = Event.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render :json => @events }\n end\n end",
"def index\n respond_with(@events)\n end",
"def show\n @event = Event.find(params[:id])\n\n respond_to do |format|\n format.json { render json: @event, methods: [:talks] }\n end\n end",
"def new\n @idea_event = IdeaEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @idea_event }\n end\n end",
"def get(event_id)\n @client.request \"events/#{event_id}\"\n end",
"def index\n @events = Event.all\n @event = Event.new\n\n respond_to do |format|\n format.html\n format.json { render 'events/index', events: @events }\n end\n end",
"def index\n render json: Event.all, status: :ok\n end",
"def show\n # Mostra la descrizione di un evento (magari su una finestrella)\n render json: @event\n end",
"def index\n response = { events: Event.all }\n respond_to do |format|\n format.json { render json: response.to_json }\n format.html { render :index }\n end\n end",
"def show\n @events = fetch_events\n end",
"def index\n if params[:eid]\n @event = Event.find( params[:eid])\n else\n @event = Event.new\n # @event.start_on = Date.new(2015,1,1)\n end\n prepare_variable_for_index_template\n\n respond_to do |format|\n format.html\n format.xml {\n render :xml => @events.to_xml\n }\n format.json {\n render :json => @events.to_json\n }\n format.atom {\n @feed_title = \"My event list\"\n }\n end\n end",
"def details\n get(\"v1/event/#{@id}\")\n end",
"def events\n response = self.class.get('/v1/events.json')\n response.code == 200 ? JSON.parse(response.body) : nil\n end",
"def show\n @event = Event.includes(:user, :changelogs).find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @event }\n end\n end",
"def index\n @event = Event.find(:first)\n end",
"def show\n @event = Event.find(params[:id])\n\n render json: @event, include: :talks\n end",
"def past_events\n @events = Event.past\n render json: @events, include: :talks\n end",
"def show\n render json: format_event(@event)\n end",
"def index\n @events = current_user.events\n\n render json: @events\n end",
"def get_events\n Resources::Event.parse(request(:get, \"Events\"))\n end",
"def show\n @events_tag = EventsTag.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @events_tag }\n end\n end",
"def index\n @events = current_user.events\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @events }\n end\n end",
"def show\n \trender json: @event\n end"
] | [
"0.7197953",
"0.7134962",
"0.71243465",
"0.7120562",
"0.7075237",
"0.70402384",
"0.7034575",
"0.6994844",
"0.6988892",
"0.69875777",
"0.69693494",
"0.69682765",
"0.6964784",
"0.6964719",
"0.69442165",
"0.69442165",
"0.6905342",
"0.68974143",
"0.68942356",
"0.6893209",
"0.6886492",
"0.68864286",
"0.688583",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68855",
"0.68766814",
"0.6870286",
"0.68673664",
"0.68600994",
"0.6859453",
"0.68589026",
"0.6857588",
"0.68545115",
"0.68499434",
"0.68499434",
"0.6846852",
"0.6846852",
"0.68406767",
"0.68406767",
"0.68406767",
"0.68406767",
"0.68406767",
"0.6832828",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68272066",
"0.68162197",
"0.6811506",
"0.68076783",
"0.6791253",
"0.6776954",
"0.67710716",
"0.6771003",
"0.6765839",
"0.6760848",
"0.6732237",
"0.6729593",
"0.6704197",
"0.67024845",
"0.6701817",
"0.6698921",
"0.6680747",
"0.66743624",
"0.66695225",
"0.6667316",
"0.66671664",
"0.6660105",
"0.66583675",
"0.6656648",
"0.6652872",
"0.664661"
] | 0.76614374 | 0 |
GET /idea_events/new GET /idea_events/new.json | def new
@idea_event = IdeaEvent.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @idea_event }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @event = Event.new\n\n @idea = Idea.find(params[:idea_id])\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @create_event = CreateEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @create_event }\n end\n end",
"def new\n @event = Event.new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n # @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n\n # render :action => 'new'\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @event }\n end\n end",
"def new\n @event = current_user.events.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event_event = Event::Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_event }\n end\n end",
"def new\n @event = Event.new\n @event.time = Time.now\n \n @page_title = \"New Event\"\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @current_event = CurrentEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @current_event }\n end\n end",
"def new\n @event=Event.new \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @project.events }\n end\n end",
"def new\n assign_new_event\n\n respond_to do |format|\n format.html { render :edit }\n format.js { render(:update) { |page| page.redirect_to(action: :new, event: event_params) } }\n end\n end",
"def new\n\t\t@event = Event.new\n\n\t\trespond_to do |format|\n\t\t\tformat.html # new.html.erb\n\t\t\tformat.json { render json: @event }\n\t\tend\n\tend",
"def create\n @idea_event = IdeaEvent.new(params[:idea_event])\n\n respond_to do |format|\n if @idea_event.save\n format.html { redirect_to @idea_event, notice: 'Idea event was successfully created.' }\n format.json { render json: @idea_event, status: :created, location: @idea_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @idea_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @event_request = EventRequest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_request }\n end\n end",
"def new\n @event = Event.new(event_type: params[:event_type])\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n @needs = Need.all\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = @current_account.events.new(Event.defaults) # TODO, set this account-wid\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @myevent = Myevent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @myevent }\n end\n end",
"def new\n @events_tag = EventsTag.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @events_tag }\n end\n end",
"def new\n @planned_event = PlannedEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @planned_event }\n end\n end",
"def new\n setup_variables\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event_interest = EventInterest.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_interest }\n end\n end",
"def new\n @evento = Evento.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @evento }\n end\n end",
"def new\n @activity_event = ActivityEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @activity_event }\n end\n end",
"def new\n @main_event = MainEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @main_event }\n end\n end",
"def new\n @event = Event.new\n @show_place = \"show-place\"\n @show_address = \"hidden-place\"\n @toogle_map = -1\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @calevent = Calevent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @calevent }\n end\n end",
"def new\n @event = Event.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @entry = Entry.new(dob: Date.new(Date.today.year - 18, 1, 1))\n get_events\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @entry }\n end\n end",
"def new\n @event = current_user.events.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n format.js # return the HTML block for use by the AJAX new.js.erb\n end\n end",
"def new\n @new_event = Event.new\n end",
"def new\n @event = Event.new\n #@venues = Venue.all\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new_default\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n @event.user_id = current_user.id\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event = Event.new\n @name = params[:name]\n @description = params[:description]\n @location = params[:location]\n @start_time = params[:start_time]\n @end_time = params[:end_time]\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def create\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @idea = @agenda.ideas.build\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @idea }\n end\n end",
"def create\n puts params[:event]\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @event = Event.new\n @userId = session[:user_id]\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @calendar_event = CalendarEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @calendar_event }\n end\n end",
"def new\n @event = Event.new\n @categories = Category.all or []\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @event_schedule = EventSchedule.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event_schedule }\n end\n end",
"def new\n @users_event = UsersEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @users_event }\n end\n end",
"def new\n @event = Event.new\n @title = 'New Event'\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n @page_title = \"Add an Event\"\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def create\n @event = Event.new(params[:event])\n @event.url = BASE_URL + @event.name.gsub(' ', '_')\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @eatvent = Eatvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @eatvent }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def new\n @event = Event.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @event }\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to \"/#{@event.url}\" }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @event = Event.new\n @action = params[:action]\n \n #場所選択からの遷移の場合\n if session[:event]\n @event['open_at'] = session[:event][:open_at]\n @event['name'] = session[:event][:name]\n end\n \n if params[:place] && params[:place].to_i > 0\n if @place = Place.find(params[:place])\n @event['place_id'] = params[:place]\n @place_name = @place.name\n end\n end\n \n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @event }\n end\n end",
"def new\n @hive_event = HiveEvent.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @hive_event }\n end\n end"
] | [
"0.8198065",
"0.7807702",
"0.7785182",
"0.77731454",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.7770926",
"0.77295256",
"0.7713353",
"0.7677956",
"0.76687187",
"0.76488435",
"0.7601687",
"0.7596627",
"0.75368524",
"0.75232047",
"0.751699",
"0.74965966",
"0.74711126",
"0.7466202",
"0.7461007",
"0.7449128",
"0.7440849",
"0.74214214",
"0.7368811",
"0.7341973",
"0.7320514",
"0.7282631",
"0.7257099",
"0.72231144",
"0.7215894",
"0.72107685",
"0.72100914",
"0.7209781",
"0.71883196",
"0.7166626",
"0.7161604",
"0.71507394",
"0.71484166",
"0.7129836",
"0.7115339",
"0.7114374",
"0.71003944",
"0.70930827",
"0.709147",
"0.709005",
"0.7084459",
"0.7075912",
"0.7068707",
"0.70608586",
"0.70574194",
"0.70536166",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7052546",
"0.7051215",
"0.70487267",
"0.704272",
"0.7034274"
] | 0.8179622 | 1 |
POST /idea_events POST /idea_events.json | def create
@idea_event = IdeaEvent.new(params[:idea_event])
respond_to do |format|
if @idea_event.save
format.html { redirect_to @idea_event, notice: 'Idea event was successfully created.' }
format.json { render json: @idea_event, status: :created, location: @idea_event }
else
format.html { render action: "new" }
format.json { render json: @idea_event.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n event = Event.new(event_params)\n event.save!\n render json: event\n end",
"def create\n @event = Event.new(params[:event])\n\n if @event.save\n render json: @event, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def create\n Rails.logger.debug(\"Received event #{params[:event]}\")\n head :ok\n end",
"def create\n megam_rest.post_event(to_hash)\n end",
"def create_event event, data={}\n data[:event] = event\n post '/event', data\n end",
"def create_events\n end",
"def create\n @event = Event.new(event_params)\n if @event.save\n head :created\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def create\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def save\n event = params\n # This assumes that all keys exists. Yay no error handling...\n toSave = Event.new(update_type: event[:event],\n start_time: event[:payload][:event][:start_time_pretty],\n end_time: event[:payload][:event][:end_time_pretty],\n location: event[:payload][:event][:location],\n invitee_name: event[:payload][:invitee][:name],\n duration: event[:payload][:event_type][:duration],\n event_kind: event[:payload][:event_type][:kind])\n toSave.save\n render json: {}, status: 200\n end",
"def create\n @event = Event.new(event_params)\n @event.organizer = current_user\n\n if @event.save\n render json: @event, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def push_events\n saved = []\n jsonHash = request.POST[:_json];\n jsonHash.each do |jsonEvent|\n event = Event.new\n event.race_id = jsonEvent[\"raceId\"]\n event.walker_id = jsonEvent[\"walkerId\"]\n event.eventId = jsonEvent[\"eventId\"]\n event.eventType = jsonEvent[\"type\"]\n event.eventData = jsonEvent[\"data\"]\n event.batteryLevel = jsonEvent[\"batL\"]\n event.batteryState = jsonEvent[\"batS\"]\n event.timestamp = Time.zone.parse(jsonEvent[\"time\"])\n if event.save # if new\n saved << jsonEvent[\"eventId\"]\n if event.race_id != 0 # if not unknown race_id\n after_create(event)\n end\n else # if exists\n saved << jsonEvent[\"eventId\"]\n puts \"Not Saved!\" # debug print\n puts jsonEvent # debug print \n end\n end\n render :json => {:savedEventIds => saved}\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.json { render :show, status: :created, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.json { render :show, status: :created, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n puts params[:event]\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = @team.events.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.js { render :refresh, status: :created, location: [@team, @event] }\n else\n format.js { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.create(params[:event])\n respond_to do |format|\n if @event.save\n current_project.events<<@event\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n if @event.save\n \tdata = { data: @event, status: :created, message: \"Event was successfully created.\" }\n render :json => data\n else\n \tdata = { data: @event.errors, status: :unprocessable_entity }\n render :json => data\n end\n end",
"def create\n # render json: params[:event]\n temp_event = Event.create(\n name: params[:event][:name],\n location: params[:event][:location],\n date: params[:event][:date],\n time: params[:event][:time],\n budget: params[:event][:budget],\n user: current_user\n )\n redirect_to \"/items?event=#{temp_event.id}\"\n end",
"def create\n @event_event = Event::Event.new(params[:event_event])\n\n respond_to do |format|\n if @event_event.save\n format.html { redirect_to @event_event, notice: 'Event was successfully created.' }\n format.json { render json: @event_event, status: :created, location: @event_event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to \"/#{@event.url}\" }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: t(:event_created) }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @user = User.find_by_authentication_token(params[:auth_token])\n @event = Event.new.from_json(params[:event])\n @event.user_id = @user.id\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n result = Event::CreateEvent.perform(event_context)\n\n respond_to do |format|\n if result.success?\n @event = result.event\n format.json { render action: 'show', status: :created }\n else\n format.json { render json: { :errors => result.errors.full_messages }, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: \"Event #{@event} was successfully created.\" }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Events::Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n if @event.save\n render :show, status: :created, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, event: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: events_path(@event) }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \n @event = Event.new(event_params)\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n params[:event] = convert_datetimes( params[:event] )\n @event = @current_account.events.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to \"/team/calendar\", notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @dia_evento = DiaEvento.new(dia_evento_params)\n\n if @dia_evento.save\n render json: @dia_evento, status: :created, location: @dia_evento\n else\n render json: @dia_evento.errors, status: :unprocessable_entity\n end\n end",
"def create\n params['user_id'] = current_user.id if current_user\n @event = Event.new(event_params)\n\n if @event.save\n render json: { location: format_event(@event) }, status: :created\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def create\r\n @event = Event.new(event_params)\r\n convert_timezone @event\r\n event_type_status @event\r\n if @event.save_without_exception\r\n update_theme @event\r\n add_event_categories @event\r\n add_event_location @event\r\n create_group_guest_list @event\r\n add_photos @event\r\n # Create Groups and contacts through CSV\r\n contacts_imports\r\n render json: SuccessResponse.new(\r\n code: 200, message: 'Event Created.', location: '/events/List?id=' + @event.id.to_s, eventID: @event.id\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new, adapter: :json, status: :unprocessable_entity\r\n end\r\n end",
"def create\n @event = Event.new(params[:event])\n @event.url = BASE_URL + @event.name.gsub(' ', '_')\n \n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to new_event_agenda_entry_path(:event_id => @event.id), notice: 'Evento creado, proceda a llenar la agenda'}\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to dashboard_home_path }\n format.json { render 'event', status: :created, event: @event }\n else\n format.html { render dashboard_home_path }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = current_user.created_events.build(event_params)\n @upcoming_events = Event.upcoming_events.order('created_at DESC')\n @past_events = Event.past_events.order('created_at DESC')\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to event_path(@event), notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :index, alert: 'Event was not created. Please try again!' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(params[:event])\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to events_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @myevent = Myevent.new(params[:myevent])\n\n respond_to do |format|\n if @myevent.save\n format.html { redirect_to @myevent, notice: 'Myevent was successfully created.' }\n format.json { render json: @myevent, status: :created, location: @myevent }\n else\n format.html { render action: \"new\" }\n format.json { render json: @myevent.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n logger.debug @event.errors.inspect\n format.html { redirect_to @event, notice: 'データが新規作成されました。' }\n format.json { render :show, status: :created, location: @event }\n else\n logger.debug @event.errors.to_hash(true)\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, success: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to new_event_path, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: \"Event was successfully created.\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n \t@expected_event = ExpectedEvent.new(expected_event_params)\n\n \trespond_to do |format|\n \t\tif @expected_event.save\n \t\t\tformat.html { redirect_to @expected_event, notice: 'Incoming event was successfully created'}\n \t\telse\n \t\t\tformat.html { render action: 'new' }\n \t\tend\n \tend\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Aula cadastrada com sucesso.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully created.' }\n format.json { render json: @event, status: :created, location: @event }\n else\n format.html { render action: \"new\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to action: :index, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, flash: {success: 'Event was successfully created.'} }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #if selected_courses_course_path\n # @event = current_user.events.build(params.require(:event).permit(:title, :starts_at, :ends_at,\n # :all_day, :description))\n #end\n\n @event = current_user.events.build(event_params)\n\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, :notice => 'Event was successfully created.' }\n format.json { render :json => @event, :status => :created, :location => @event }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def event\n @event = Event.new\n @event.title = params[:title].to_s\n @event.description = params[:description].to_s\n @event.date = Date.new\n @event.latitude = 15.2\n @event.longitude = 10.6\n @event.team_id = Team.find(params[:id]).id\n @event.user_id = current_user.id\n\n respond_to do |format|\n if @event.save\n format.html {redirect_to events_team_path, notice: 'User team was successfully created.'}\n format.json {render :show, status: :created, location: @event}\n else\n format.html {redirect_to team_path}\n format.json {render json: @team.errors, status: :unprocessable_entity}\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n @event.user_id = current_user.id\n respond_to do |format|\n if @event.save\n # イベントのステータスで落選に変更されたら、企業のステータスを選考済みに変更\n if @event.is_passed == 'droped'\n company = Company.find(@event.company_id)\n company.is_active = false\n company.save\n end\n format.html { redirect_to @event, notice: \"イベントの作成に成功しました。\" }\n format.json { render :show, status: :created, location: @event }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n respond_to do |format|\n build_event\n if save_event_and_timeline\n format.html { redirect_to_event_or_timeline }\n format.json { render action: 'show', status: :created, location: @event }\n else\n format.html { render action: 'new' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @event = Event.new(event_params)\n\n respond_to do |format|\n if @event.save\n set_event_unique_active @event if @event.active\n format.html { redirect_to team_events_url, notice: 'Event was successfully created.' }\n format.json { render :show, status: :created, location: team_events_url }\n else\n format.html { render :new }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.71049494",
"0.70508987",
"0.7032693",
"0.70037943",
"0.69367915",
"0.6895117",
"0.6892014",
"0.6888186",
"0.6881555",
"0.68300587",
"0.68238586",
"0.6814062",
"0.6814062",
"0.6804513",
"0.6785903",
"0.67474085",
"0.6739564",
"0.67283446",
"0.67163396",
"0.6708329",
"0.6707064",
"0.6700559",
"0.6695815",
"0.66950804",
"0.6688826",
"0.6688416",
"0.66827375",
"0.6679978",
"0.6677805",
"0.6677465",
"0.66544765",
"0.6653772",
"0.66441864",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.66389346",
"0.6636034",
"0.6625623",
"0.6623943",
"0.66203654",
"0.6613133",
"0.6594906",
"0.65933084",
"0.65880954",
"0.65875953",
"0.65828276",
"0.6580357",
"0.6580357",
"0.6575938",
"0.65693885",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.65627503",
"0.6559639",
"0.65578556",
"0.65578556",
"0.6557715",
"0.65518767",
"0.6551548",
"0.6544413",
"0.6538706",
"0.65291464",
"0.6521901",
"0.65191287",
"0.6514956",
"0.6512185"
] | 0.75621074 | 0 |
PUT /idea_events/1 PUT /idea_events/1.json | def update
@idea_event = IdeaEvent.find(params[:id])
respond_to do |format|
if @idea_event.update_attributes(params[:idea_event])
format.html { redirect_to @idea_event, notice: 'Idea event was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @idea_event.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update\n if @event.update(event_params)\n render json: @event, status: 201\n else\n render json: { message: \"Error. Error. Please try again.\"}, status: 400\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { head :no_content }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\tif @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n if @event.update(params[:event])\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render :show, status: :ok, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render :show, status: :ok, location: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @event.update(event_params)\n render json: { location: format_event(@event) }\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @event.update(event_params(params))\n render json: @event, status: 200\n else\n render :json => @event.errors, :status => 422\n end\n end",
"def update\n # @event = Event.find(params[:id])\n\n if @event.update(event_params)\n head :no_content\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n \n \n @event = Event.find(params[:id])\n\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: t(:event_updated) }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n \n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.json { render json: @event }\n else\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.using(:shard_one).find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n\n\n\n end",
"def update\n if @event.update(event_params)\n render :show, status: :ok, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n if @event.update(event_params)\n render :show, status: :ok, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\r\n @event.update(event_params)\r\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event_event = Event::Event.find(params[:id])\n\n respond_to do |format|\n if @event_event.update_attributes(params[:event_event])\n format.html { redirect_to @event_event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event_event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n debugger\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n @event.save!\n end\n end",
"def update\n @event.update(event_params)\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, flash: {success: 'Event was successfully created.'} }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event.update(event_params) \n end",
"def update\n\n respond_to do |format|\n if @event.update(event_params)\n\n format.html { redirect_to @event }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def put_events(args)\n\tapi_url = \"#{@base_url}/#{args[:collection]}/#{args[:key]}/events/#{args[:event_type]}\"\n\tputs do_the_put_call( url: api_url, user: @user, json: args[:json] )\nend",
"def update\n event_id = params[:id]\n if event_id.present? && params[:event].present? && @user.uuid.present? && @user.uuid != \"guest\"\n event_params = params[:event]\n @event = Com::Nbos::Events::Event.where(id: params[:id], user_id: @user.id ).first\n if @event.present?\n @event.update(event_params.permit!)\n if @event.save\n render :json => @event\n else\n data = add_error_messages(@event)\n render :json => data\n end\n else\n render :json => {\"messageCode\": \"module.user.unauthorized\", \"message\": \"Unauthorized to update others Event\"}, status: 404\n end\n else\n render :json => {messageCode: \"bad.request\", message: \"Bad Request\"}, status: 400\n end\n end",
"def update\n @event = Event.find(params[:id])\n if @event.update(event_params)\n render :show, status: :ok, location: @event\n else\n render json: @event.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n @event.update(status: \"Pending\")\n else\n @reopen = true\n format.json { render json: @event.errors, status: :unprocessable_entity }\n format.html { render :show }\n end\n end\n end",
"def update\n @event.title = event_params['title']\n respond_to do |format|\n if @event.save\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n # @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n #@event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n set_event_unique_active @event if @event.active\n format.html { redirect_to team_events_url, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: team_events_url }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n return forbidden unless user_is_owner\n return bad_request unless @event.update_attributes(event_params)\n render json: @event, status: :ok\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, :notice => 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @event.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n\n\n\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to '/', notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Goal activity was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { redirect_to @event }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @event = Event.find(params[:id])\n\n respond_to do |format|\n if @event.update_attributes(params[:event])\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event.update(event_params)\n format.html { redirect_to @event, notice: 'Event was successfully updated.' }\n format.json { render :show, status: :ok, location: @event }\n else\n format.html { render :edit }\n format.json { render json: @event.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.717151",
"0.7011583",
"0.69779676",
"0.6961609",
"0.6927854",
"0.6927854",
"0.6925483",
"0.6925483",
"0.6918708",
"0.6892866",
"0.688861",
"0.6880861",
"0.68747026",
"0.68413895",
"0.6805405",
"0.6805405",
"0.67964655",
"0.67947567",
"0.67864186",
"0.6774603",
"0.67690945",
"0.6766896",
"0.67530483",
"0.6749418",
"0.67362136",
"0.67261076",
"0.6718293",
"0.67152643",
"0.67136717",
"0.6707294",
"0.6706468",
"0.6704614",
"0.6700533",
"0.66812485",
"0.66779774",
"0.66763526",
"0.66763526",
"0.66763526",
"0.6673943",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6668628",
"0.6665453",
"0.6665453",
"0.6665453",
"0.6665453",
"0.6665453",
"0.6665453",
"0.6665453",
"0.6665453",
"0.66653675",
"0.6660966",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.6658837",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733",
"0.66582733"
] | 0.7532498 | 0 |
DELETE /idea_events/1 DELETE /idea_events/1.json | def destroy
@idea_event = IdeaEvent.find(params[:id])
@idea_event.destroy
respond_to do |format|
format.html { redirect_to idea_events_url }
format.json { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @events = Event.where(event_id: params[:id])\n @events.each.destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def delete_event\n if params[:id]\n @e = Evento.find(params[:id]).destroy\n end\n render :json => msj = { :status => true, :message => 'ok'}\n end",
"def destroy\n # @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n #@event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.using(:shard_one).find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n \n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @myevent = Myevent.find(params[:id])\n @myevent.destroy\n\n respond_to do |format|\n format.html { redirect_to myevents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_event = Event::Event.find(params[:id])\n @event_event.destroy\n\n respond_to do |format|\n format.html { redirect_to event_events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n\n end",
"def destroy\n @event = Event.find(params[:id])\n #@event.update_attribute(:deleted, true)\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :ok }\n end\n end",
"def delete_event\r\n event = Event.find_by(id: params[:eventid].to_i)\r\n if event.present?\r\n event.update(status: 3)\r\n lt_update_event_status event, 'archived'\r\n render json: SuccessResponse.new(\r\n code: 200,\r\n message: 'Event Deleted.'\r\n ), adapter: :json, status: :ok\r\n else\r\n render json: ErrorResponse.new(\r\n code: 404,\r\n message: 'Event not found!'\r\n ), adapter: :json, status: :not_found\r\n end\r\n\r\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url}\n format.json { head :no_content }\n end\n end",
"def destroy\n @calevent = Calevent.find(params[:id])\n @calevent.destroy\n\n respond_to do |format|\n format.html { redirect_to calevents_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @client = Client.find(@event.client_id)\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_to_client_path(@client) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n\n sync_destroy @event\n\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event_request = EventRequest.find(params[:id])\n @event_request.destroy\n\n respond_to do |format|\n format.html { redirect_to event_requests_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @hack.destroy\n\n respond_to do |format|\n format.html { redirect_to event_url(@event) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html {redirect_to events_url}\n format.json {head :no_content}\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_events_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n @eventtype.events.each do |e|\n e.destroy\n end\n @eventtype.destroy\n respond_to do |format|\n format.html { redirect_to eventtypes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @attend_event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Request successfully rescinded.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to scrappers_url, notice: 'Event was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @post_event.destroy\n respond_to do |format|\n format.html { redirect_to post_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to top_index_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n head :no_content\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to admin_events_url, notice: '比赛项目删除成功' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to root_url, notice: 'Event was successfully removed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url, notice: t(:event_deleted) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bom_event.destroy\n respond_to do |format|\n format.html { redirect_to bom_events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event.destroy\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'データが削除されました。' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = EventPost.find(params[:id])\n @event.destroy\n track_activity @event\n respond_to do |format|\n format.html { redirect_to events_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @eatvent = Eatvent.find(params[:id])\n @eatvent.destroy\n\n respond_to do |format|\n format.html { redirect_to eatvents_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @evento = Evento.find(params[:id])\n @evento.destroy\n\n respond_to do |format|\n format.html { redirect_to eventos_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @event.destroy\n\n respond_to do |format|\n format.html { redirect_to events_url, notice: 'Мероприятие успешно удалено.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @event = Event.find(params[:id])\n @event.destroy\n render :nothing => true, :status => 200, :content_type => 'text/plain'\n end"
] | [
"0.7356678",
"0.7356678",
"0.7356678",
"0.72667587",
"0.7262428",
"0.725565",
"0.72413594",
"0.72362095",
"0.7225875",
"0.7220038",
"0.7208296",
"0.7172826",
"0.71633935",
"0.7155159",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71438223",
"0.71248543",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.71244293",
"0.7124326",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.7121239",
"0.71206117",
"0.71116436",
"0.70957637",
"0.7075383",
"0.7060027",
"0.70514846",
"0.70464855",
"0.70456797",
"0.7041581",
"0.70401984",
"0.7036231",
"0.7032882",
"0.7031781",
"0.7030195",
"0.7017769",
"0.70153",
"0.70083433",
"0.7001671",
"0.69938207",
"0.6991045",
"0.6981649",
"0.6974969",
"0.6974028",
"0.69723797",
"0.696969"
] | 0.7519186 | 0 |
Use callbacks to share common setup or constraints between actions. | def set_step
@step = @recipe_item.steps.find(params[:id])
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def step_params
params.require(:step).permit(:recipe_item_id, :comment, cards: [:c_type, :c_id, :c_varient]
)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def param_whitelist\n [:role, :title]\n end",
"def expected_permitted_parameter_names; end",
"def safe_params\n params.except(:host, :port, :protocol).permit!\n end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end",
"def valid_params_request?; end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def permitted_params\n []\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end",
"def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end",
"def safe_params\n params.require(:user).permit(:name)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def check_params; true; end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def quote_params\n params.permit!\n end",
"def valid_params?; end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend",
"def filtered_parameters; end",
"def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end",
"def filtering_params\n params.permit(:email, :name)\n end",
"def check_params\n true\n end",
"def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend",
"def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end",
"def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end",
"def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend",
"def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end",
"def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end",
"def active_code_params\n params[:active_code].permit\n end",
"def filtering_params\n params.permit(:email)\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end",
"def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end",
"def post_params\n if current_user.admin? \n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end",
"def list_params\n params.permit(:name)\n end",
"def filter_parameters; end",
"def filter_parameters; end",
"def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def url_whitelist; end",
"def admin_social_network_params\n params.require(:social_network).permit!\n end",
"def filter_params\n params.require(:filters).permit(:letters)\n end",
"def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end",
"def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end",
"def sensitive_params=(params)\n @sensitive_params = params\n end",
"def permit_request_params\n params.permit(:address)\n end",
"def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end",
"def secure_params\n params.require(:location).permit(:name)\n end",
"def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end",
"def question_params\n params.require(:survey_question).permit(question_whitelist)\n end",
"def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end",
"def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end",
"def maintenance_request_params\n params[:maintenance_request].permit! #allow all parameters for now\n end",
"def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end",
"def url_params\n params[:url].permit(:full)\n end",
"def backend_user_params\n params.permit!\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end",
"def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end",
"def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end",
"def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end",
"def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end",
"def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end"
] | [
"0.69792545",
"0.6781151",
"0.67419964",
"0.674013",
"0.6734356",
"0.6591046",
"0.6502396",
"0.6496313",
"0.6480641",
"0.6477825",
"0.64565",
"0.6438387",
"0.63791263",
"0.63740575",
"0.6364131",
"0.63192815",
"0.62991166",
"0.62978333",
"0.6292148",
"0.6290449",
"0.6290076",
"0.62894756",
"0.6283177",
"0.6242471",
"0.62382483",
"0.6217549",
"0.6214457",
"0.6209053",
"0.6193042",
"0.6177802",
"0.6174604",
"0.61714715",
"0.6161512",
"0.6151757",
"0.6150663",
"0.61461",
"0.61213595",
"0.611406",
"0.6106206",
"0.6105114",
"0.6089039",
"0.6081015",
"0.6071004",
"0.60620916",
"0.6019971",
"0.601788",
"0.6011056",
"0.6010898",
"0.6005122",
"0.6005122",
"0.6001556",
"0.6001049",
"0.59943926",
"0.5992201",
"0.59909594",
"0.5990628",
"0.5980841",
"0.59669393",
"0.59589154",
"0.5958826",
"0.5957911",
"0.5957385",
"0.5953072",
"0.59526145",
"0.5943361",
"0.59386164",
"0.59375334",
"0.59375334",
"0.5933856",
"0.59292704",
"0.59254247",
"0.5924164",
"0.59167904",
"0.59088355",
"0.5907542",
"0.59064597",
"0.5906243",
"0.5898226",
"0.589687",
"0.5896091",
"0.5894501",
"0.5894289",
"0.5891739",
"0.58860534",
"0.5882406",
"0.587974",
"0.58738774",
"0.5869024",
"0.58679986",
"0.5867561",
"0.5865932",
"0.5864461",
"0.58639693",
"0.58617616",
"0.5861436",
"0.5860451",
"0.58602303",
"0.5854586",
"0.58537364",
"0.5850427",
"0.5850199"
] | 0.0 | -1 |
Convert the freebase ID for a type into an API endpoint ID | def to_endpoint id
t=id.split('/')
domain = t[1..-2].join('_')
type = t[-1]
"freebase_tsv_#{domain}__#{type}"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def input_id_from_type(type); end",
"def input_id_from_type(type)\n id = input_name_from_type(type).gsub(/([\\[(])|(\\]\\[)/, \"_\").gsub(/[\\])]/, \"\")\n id = @options[:namespace] + \"_\" + id if @options[:namespace]\n\n id\n end",
"def to_global_id(type_name, id)\n Base64.strict_encode64(\"#{type_name}-#{id}\")\n end",
"def af_type_id\n @af_type_id ||= self.name.gsub(/.*::/, '').split('').grep(/[A-Z]/).join.downcase\n end",
"def freebase_id\n to_nil _response_entity.fetch(\"freebaseId\", nil)\n end",
"def uwt_id\n GraphQL::Schema::UniqueWithinType.encode(self.class.name, id)\n end",
"def type\r\n\t\t\t`#{BITS::BITSADMIN} /gettype {#{@id}}`\r\n\t\tend",
"def make_ref_from_oid(object_type, object_id)\n return \"/#{object_type}/#{object_id}\"\n end",
"def account_type_to_param(account_type)\n account_type.to_s.underscore.tr(\"/\", \"_\")\n end",
"def find_phoneable_type\n %w(organization person).each do |type|\n param_name = [type, 'id'].join('_').to_sym\n requested_id = params[param_name]\n return type.to_sym unless requested_id.blank?\n end\n nil\n end",
"def type_id\n @type_id ||= extract_int(@content[0...TYPE_SIZE])\n end",
"def identifier_type_friendly\n return nil if identifier_type.blank?\n\n IdentifierTypesStrToFull[identifier_type]\n end",
"def make_id\n \"#{self.class.name.downcase}#{id}\"\n end",
"def id\n \"#{kind}_#{@id}\"\n end",
"def id\n \"#{kind}_#{@id}\"\n end",
"def polymorphic_id_param\n \"#{key}_id\".to_sym\n end",
"def find_websiteable_type\n %w(organization person).each do |type|\n param_name = [type, 'id'].join('_').to_sym\n requested_id = params[param_name]\n return type.to_sym unless requested_id.blank?\n end\n nil\n end",
"def get_id\n default_id = self.class.to_s.split('::').last\n default_id[0] = default_id[0].downcase\n return default_id\n end",
"def get_typename_from_id(id)\n case id.chars[0]\n when 'T'\n 'trackId'\n when 'A'\n 'artistId'\n when 'B'\n 'albumId'\n when 'L'\n 'curatedStationId'\n else\n 'stationId'\n end\n end",
"def resource_type\n /(\\/api\\/(?<type>\\w+)\\/?)/ =~ full_url\n type.capitalize\n end",
"def name_and_id_from_options(options, type) #:nodoc:\n options[:name] = (options[:prefix] || DEFAULT_PREFIX) + (options[:discard_type] ? '' : \"[#{type}]\")\n options[:id] = options[:name].gsub(/([\\[\\(])|(\\]\\[)/, '_').gsub(/[\\]\\)]/, '').gsub(/\\./, '_').gsub(/_+/, '_')\n end",
"def name_and_id_from_options(options, type) #:nodoc:\n options[:name] = (options[:prefix] || DEFAULT_PREFIX) + (options[:discard_type] ? '' : \"[#{type}]\")\n options[:id] = options[:name].gsub(/([\\[\\(])|(\\]\\[)/, '_').gsub(/[\\]\\)]/, '').gsub(/\\./, '_').gsub(/_+/, '_')\n end",
"def parse_inst_type(type)\n return type.downcase.gsub(\"_\", \".\")\n end",
"def short_code\n id.to_s 36\n end",
"def api_name\n to_s.split('::').last.gsub(/[^\\A]([A-Z])/, '_\\\\1').downcase\n end",
"def genURI(c_type)\n if c_type == 'BTC'\n c_uri = '/v1/ticker/bitcoin'\n elsif c_type == 'LTC'\n c_uri = '/v1/ticker/litecoin'\n elsif c_type == 'ETH'\n c_uri = '/v1/ticker/ethereum'\n elsif c_type == 'XRP'\n c_uri = '/v1/ticker/ripple'\n else\n c_uri = '/v1/ticker/notsupported'\n end\n return c_uri\nend",
"def generate_gid(type, id)\n Base64.strict_encode64([\"0\", type.length, \":\", type, id.to_s].join)\nend",
"def typed_id\n self.class.name + ':' + self.id.to_s\n end",
"def to_id\n\t\treturn self.\n\t\t\tgsub(\"::\", \"\").\n\t\t\tgsub(/([A-Z]+)([A-Z][a-z])/,'\\1_\\2').\n\t\t\tgsub(/([a-z\\d])([A-Z])/,'\\1_\\2').\n\t\t\tdowncase.\n\t\t\tgsub(\"_\", \"-\")\n\tend",
"def type\n @type ||= IDS_TYPES[type_id]\n end",
"def id\n @id || self.class.name.underscore.split('/').last #gsub('/', '_')\n end",
"def location_string(type, id, version=nil)\n link = Conf.base_uri\n case type.downcase\n when \"workflow\"; link += \"/workflows/\"\n when \"blob\"; link += \"/files/\"\n when \"pack\"; link += \"/packs/\"\n else; return( link += \"/home\" )\n end\n \n link += id.to_s\n link += \"?version=#{version}\" if version\n \n return link\n end",
"def appeal_status_id\n \"A#{id}\"\n end",
"def it_doc_id\n type_name + ' ' + self.id.to_s\n end",
"def gen_inst_id_str(inst_str)\n return inst_str.gsub(/[\\.:\\[\\]]/,'_').upcase\nend",
"def api_gift_id\n return nil unless (extended_api_gift_id = read_attribute(:api_gift_id))\n encrypt_remove_pre_and_postfix(extended_api_gift_id, 'api_gift_id', 24)\n end",
"def api_id\n chip_api.tmp_api_id\n end",
"def surrogate_type\n case @surrogate_method\n when 'guid'\n [\"Guid\", {auto_assign: 'guid'}]\n when 'hash'\n hash_type\n else # counter\n type_name, min, max, length = choose_integer_range(0, 2**(default_autoincrement_length-1)-1)\n type_name\n end\n end",
"def obo_id(so_id)\n return \"obo:\" + so_id.sub(':', '_')\n end",
"def get_api_endpoint(type_to_scrape, obj)\n reader = CSV.read('keys/eventbrite_key.csv')\n token = reader.shift[0]\n organizer_id = reader.shift[0]\n prefix = \"https://www.eventbriteapi.com/v3\"\n case type_to_scrape\n when \"eid\"\n endpoint = \"#{prefix}/users/#{organizer_id}/owned_events/\" \\\n \"?order_by=start_desc&token=#{token}\"\n when \"event\"\n endpoint = \"#{prefix}/events/#{obj}/?token=#{token}\"\n when \"attendee\"\n endpoint = \"#{prefix}/events/#{obj}/attendees/?token=#{token}&expand=\" \\\n \"category,attendees,subcategory,format,venue,event\" \\\n \",ticket_classes,organizer,order,promotional_code\"\n when \"vid\"\n endpoint = \"#{prefix}/users/#{organizer_id}/venues/?token=#{token}\"\n when \"venue\"\n endpoint = \"#{prefix}/venues/#{obj}/?token=#{token}\"\n end\n end",
"def id_format(object)\n object.id_format || object.self_link_uri\n end",
"def base_uri\n \"#{api}/map_type\"\n end",
"def person_url(id)\n 'https://swapi.co/api/people/' + id.to_s\nend",
"def get_resource_type_identifier(type)\n get_type_identifier(type, Occi::Core::Resource.kind)\n end",
"def type_key\n type.demodulize.underscore\n end",
"def graphql_id\n Base64.encode64(\"04:#{self.class.name}#{id}\").rstrip\n end",
"def external_id; end",
"def normalize_account_id(id)\n return id if id.to_s.start_with?('act_')\n \"act_#{id}\"\n end",
"def uid_endpoint\n\t\t\t@uid[-2, 2]\n\t\tend",
"def generate_id(str); end",
"def normalize_link_id(id); end",
"def basic_generate_id(str); end",
"def primary_fta_service_type_id=(num)\n build_primary_assets_fta_service_type(fta_service_type_id: num, is_primary: true)\n end",
"def get_link_type_identifier(type)\n get_type_identifier(type, Occi::Core::Link.kind)\n end",
"def get_id_by_type(type)\n MyAdmin.get_name_by_type(type)\n end",
"def base_id\n \"#{firstname} #{lastname}\"\n end",
"def get_api_key_code(storage_type)\n case storage_type\n when :environment\n \"ENV['TIMBER_API_KEY']\"\n when :inline\n \"'#{api.api_key}'\"\n else\n raise ArgumentError.new(\"API key storage type not recognized! \" \\\n \"#{storage_type.inspect}\")\n end\n end",
"def polymorphic_type_param\n \"#{key}_type\".to_sym\n end",
"def resource_id\n return \"%s:%s\" % [self.resource_type, self.id]\n end",
"def get_outype(outype_id)\n path = \"/d2l/api/lp/#{$lp_ver}/outypes/#{outype_id}\"\n _get(path)\nend",
"def generate_short_url\n # encode base 10 id as base 62 string as seen here https://gist.github.com/zumbojo/1073996\n i = id\n return Constants::ALPHABET[0] if i.zero?\n s = ''\n base = Constants::ALPHABET.length\n while i > 0\n s << Constants::ALPHABET[i.modulo(base)]\n i /= base\n end\n update_attributes(short_url: s.reverse)\n end",
"def openid_endpoint; end",
"def id\n \n id = path_info.split(\"/\")[1]\n\n # Fix for /path/id.with.dot like /person/full.name - where format is \"json\" (derived from either Accept or Content-Type)\n if [\"html\", \"json\", \"xml\"].include? format\n if not id.nil? # for POST id is null\n id = id.gsub(/\\.(html|json|xml)$/, \"\")\n end\n \n else\n \n # Otherwise, remove trailing .json or .xml\n if id =~ /[.]/\n id = id.split(\".\")\n id.pop\n id = id.join(\".\")\n end\n \n end\n \n if id == [] or id.nil?\n id == \"\"\n end\n \n id\n \n end",
"def id_for(type, content)\n sha \"#{type} #{content.length}\\0#{content}\"\n end",
"def product_type_decode(type)\n return \"Workstation\" if type == 1\n\n \"Server\"\n end",
"def type_str\n MARKETPLACEAPP_TYPES[type]\n end",
"def widget_id\n [type.to_s, id.to_s].join('_')\n end",
"def initialize(type, id)\n @type = type.to_s\n @id = id.to_s\n end",
"def initialize(type, id)\n @type = type.to_s\n @id = id.to_s\n end",
"def full_type\n type_and_version.join(\"/\")\n end",
"def to_id(obj)\n current_server.to_id(obj)\n end",
"def model_html_id(base)\n name = base.to_s.strip\n unless name.end_with?(*RESERVED_SUFFIX)\n FIELD_PREFIX.find { |prefix| name.delete_prefix!(prefix) }\n end\n name = 'None' if name.blank?\n html_id(name, camelize: true)\n end",
"def type(index)\n i = get_field_index_by_external_id(index,@fields[:type])\n fields(index, i)['id'].to_i unless i.nil?\n end",
"def get_id(id)\n native_id = Integer(id.split(\":\")[-1])\n end",
"def conv_itype(itype)\n case itype\n when 'concept' : itype\n when 'query_doctrack' : 'query'\n else 'document'\n end\nend",
"def gen_api_key\n u = UUID.new\n self.api_key = u.generate\n end",
"def to_uri\n\"urn:uuid:\" + self.to_s\nend",
"def id\n super.to_s.tr('.', '_')\n end",
"def convert_xid(type, id)\n map = {:gid => :group, :uid => :user}\n raise ArgumentError, \"Invalid id type #{type}\" unless map.include?(type)\n ret = Puppet::Util.send(type, id)\n if ret == nil\n raise Puppet::Error, \"Invalid #{map[type]}: #{id}\"\n end\n ret\n end",
"def pay_method\n @type = params[:id]\n end",
"def jsonapi_type\n controller_name.dasherize\n end",
"def id\n \"#{controller.url}/#{name}\"[1..-1].gsub('/', '_')\n end",
"def extract_id(self_link, type)\n link = self_link.match(/#{type}\\/(?<id>\\d+)/)\n id = link['id']\nend",
"def to_param_from_slug\n slug? ? slug.to_friendly_id : id.to_s\n end",
"def to_param_from_slug\n slug? ? slug.to_friendly_id : id.to_s\n end",
"def to_param_from_slug\n slug? ? slug.to_friendly_id : id.to_s\n end",
"def obj_id\n uri.split('/').last\n end",
"def obj_id\n uri.split('/').last\n end",
"def account_type_to_route(class_string)\n account_type_to_param(class_string).sub(/_account\\z/, \"\").pluralize\n end",
"def location_type_id\n f1_type = F1::AddressType.where(id: self.address_type_id).take\n return if f1_type.nil?\n\n location_type = CIVICRM::LocationType.where(name: f1_type.name).take\n return if location_type.nil?\n\n location_type.id\n end",
"def from_id(id, type=DEFAULT_ID_TYPE)\n case type\n when :inchikey\n url = \"http://www.chemspider.com/InChI.asmx/InChIKeyToInChI?inchi_key=\" + URI::encode(id)\n doc_string = retrieve_info_from_url(url)\n doc = REXML::Document.new( doc_string )\n inchi_string = doc.root.children.first.to_s\n raise(ArgumentError, \"did not retrieve a valid inchi string\") unless inchi_string[/^InChI=/]\n from_string(inchi_string, :inchi)\n when :lmid # lipidmaps id\n url = \"http://www.lipidmaps.org/data/LMSDRecord.php?OutputType=SDF&Mode=File&LMID=\" + id\n doc_string = retrieve_info_from_url(url)\n from_string(doc_string, :sdf)\n end\n end",
"def generate_id(v)\n @collection_id + '-' + v.downcase.gsub(/\\/+/, '_').gsub(/;+|\\.+/, '').gsub(/ /, '-')\n end",
"def wsdl_constantize(type)\n type = type.split(':').last\n type = 'int' if %w[long short byte].include?(type)\n type = 'float' if type == 'double'\n type = 'binary' if type == 'base64Binary'\n type = 'ManagedObject' if type == 'ManagedObjectReference'\n\n type = type.camelcase\n type.safe_constantize || \"RbVmomi::BasicTypes::#{type}\".safe_constantize || \"#{wsdl_to_rbvmomi_namespace(@wsdl)}::#{type}\".safe_constantize\n end",
"def website_link_type_id\n @website_link_type_id ||= LinkType.find_by_name('Website').id\n end",
"def endpoint\n \"http://api.urlvoid.com/#{identifier}/#{api_key}\"\n end",
"def urn_base; \"urn:#{Settings.partition_name}:#{Settings.service.name}:#{Settings.region}\" end",
"def urn_base; \"urn:#{Settings.partition_name}:#{Settings.service.name}:#{Settings.region}\" end",
"def getPageInfoByIDType(id, type)\n request('getPageInfoByIDType', {'id' => id, 'type' => type})\nend",
"def target_id\n RssLog.all_types.each do |type|\n obj_id = send(\"#{type}_id\".to_sym)\n return obj_id if obj_id\n end\n nil\n end",
"def openid_endpoint=(_arg0); end"
] | [
"0.6792407",
"0.6662417",
"0.6573084",
"0.6489617",
"0.6410082",
"0.59508866",
"0.59441143",
"0.5940383",
"0.5859375",
"0.5813677",
"0.57950646",
"0.5763398",
"0.573497",
"0.57264423",
"0.57264423",
"0.57245207",
"0.5715948",
"0.5667675",
"0.56559783",
"0.5621114",
"0.5607873",
"0.56072474",
"0.55908114",
"0.5583123",
"0.5573894",
"0.55735546",
"0.5556802",
"0.55549353",
"0.5532438",
"0.5523663",
"0.5511759",
"0.5502029",
"0.54991436",
"0.5488611",
"0.5486279",
"0.5479271",
"0.5476725",
"0.5466312",
"0.54595286",
"0.5456722",
"0.5451252",
"0.5447389",
"0.54154223",
"0.541363",
"0.5398832",
"0.53784513",
"0.5355348",
"0.5352906",
"0.5351266",
"0.534965",
"0.5345278",
"0.533083",
"0.532749",
"0.5326241",
"0.53221977",
"0.53172624",
"0.53166217",
"0.53097874",
"0.5307764",
"0.5307593",
"0.5304259",
"0.5298376",
"0.52929217",
"0.52892524",
"0.52569294",
"0.5247385",
"0.5238405",
"0.5230612",
"0.5230612",
"0.5230191",
"0.5229415",
"0.5221986",
"0.52206856",
"0.5210336",
"0.51978904",
"0.5197337",
"0.5190771",
"0.51901513",
"0.5190034",
"0.51820415",
"0.5180112",
"0.51769537",
"0.5174385",
"0.51705706",
"0.51705706",
"0.51705706",
"0.5169591",
"0.5169591",
"0.51655495",
"0.5161478",
"0.5160287",
"0.5154808",
"0.5147802",
"0.51463",
"0.51440275",
"0.51429033",
"0.51429033",
"0.51337475",
"0.51315707",
"0.51300603"
] | 0.7049781 | 0 |
used for if then statements! | def user_login?
!!current_user
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conditionally(*) end",
"def conditionally(*) end",
"def if_condition; end",
"def cond; end",
"def cond; end",
"def cond; end",
"def condition; end",
"def if_proc; end",
"def else_clause\n expect :if\n self[3]\n end",
"def statement; end",
"def check ; true ; end",
"def if_proc=(_arg0); end",
"def then_clause\n expect :if\n self[2]\n end",
"def success?() end",
"def processing_instruction?; end",
"def processing_instruction?; end",
"def run_cond; end",
"def semact?; false; end",
"def func1 val # Val should be inside round brackets ().\r\n if val = 1 # Requires == to check status\r\n return true # Indent both returns as they relate to the if and else.\r\n else\r\n return false\r\n end\r\nend",
"def excl\n \"else \"\n end",
"def passed?; end",
"def passed?; end",
"def passed?; end",
"def complex_condition?(condition); end",
"def else?\n loc.else\n end",
"def result_of_checking; end",
"def if_stmt \n\t\n\t$cst.add_branch(\"IfStatement\")\n\t\n\tmatch_token(\"T_IF\", $tokens[$index])\n\tboolexpr\n\tblock\n\t\n\t$cst.ascend\n\t\nend",
"def continue?; end",
"def flag; end",
"def parse_condition; end",
"def parse_condition; end",
"def foo a\r\n if a==1; \"one\" elsif a==2; \"two\" else \"unknown\" end\r\nend",
"def condition\n expect :if\n self[1]\n end",
"def success?(*) end",
"def cond=(_arg0); end",
"def cond=(_arg0); end",
"def cond=(_arg0); end",
"def main\n if 0\n p 0\n end\n\n if \"\" then\n p 1\n end\n\n if false\n p 2\n end\n\n if nil\n p 3\n end\n\n scope\nend",
"def performed?; end",
"def performed?; end",
"def placebo?; false end",
"def condition(x)\r\n if x\r\n puts \"condition executed\"\r\n end\r\nend",
"def checks; end",
"def else!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 44 )\n\n\n\n type = ELSE\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 222:7: 'else'\n match( \"else\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 44 )\n\n\n end",
"def win_condition\nend",
"def success?; end",
"def success?; end",
"def success?; end",
"def success?; end",
"def success?; end",
"def success?; end",
"def test_if_statement_modifiers\n result = :default_value\n result = :true_value if true\n\n assert_equal :true_value, result\n end",
"def check_starting(_lang_result)\n end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def process_else(exp)\n add_to_score :branch\n penalize_by 0.1 do\n process_until_empty exp\n end\n s()\n end",
"def positive?; end",
"def my_if(condition, then_clause, else_clause)\n if condition\n then_clause.call\n else\n else_clause.call\n end\nend",
"def else_branch\n node.else\n end",
"def expand_elses(branch); end",
"def expand_elses(branch); end",
"def func1 val #missing the symbols for arguments\r\n if val = 1 #missing 1 equal symbol.\r\n return true\r\n else\r\n return false\r\n end\r\nend",
"def else!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n\n\n\n type = ELSE\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 27:7: 'sino'\n match( \"sino\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n\n end",
"def proceed!; end",
"def proceed!; end",
"def child_condition; end",
"def compileif\n\n end",
"def local?; end",
"def k_else!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 40 )\n\n\n\n type = K_ELSE\n channel = ANTLR3::DEFAULT_CHANNEL\n # - - - - label initialization - - - -\n\n\n # - - - - main rule block - - - -\n # at line 413:4: 'else'\n match( \"else\" )\n\n\n\n @state.type = type\n @state.channel = channel\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 40 )\n\n\n end",
"def returnin_early(early)\n return \"left early\" if early\n \"left late\"\nend",
"def method4(a)\n if a\n puts \"bam\"\n end\n unless(a)\n puts \"baz\"\n end\n end",
"def else!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 66 )\n\n type = ELSE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 187:8: 'else'\n match( \"else\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 66 )\n\n end",
"def result?; end",
"def check\n \n end",
"def check\n \n end",
"def next_statement; end",
"def probers; end",
"def process_if exp\n exp = exp.dup\n condition = exp[1] = process exp.condition\n\n if true? condition\n exp[2] = process exp.then_clause if exp.then_clause\n exp[3] = nil\n elsif false? condition\n exp[2] = nil\n exp[3] = process exp.else_clause if exp.else_clause\n else\n exp[2] = process exp.then_clause if exp.then_clause\n exp[3] = process exp.else_clause if exp.else_clause\n end\n\n exp\n end",
"def hit_condition()\n #This is a stub, used for indexing\n end",
"def conditional_requests=(_arg0); end",
"def conditions; end",
"def conditions; end",
"def else!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 20 )\n\n type = ELSE\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 322:8: 'else'\n match( \"else\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 20 )\n\n end",
"def non_complex_expression?(condition); end",
"def true(_argvs)\n return nil\n end",
"def main\n\n #\n # In Ruby, everything is an expression, even the control structures.\n #\n\n #\n # if, else, elsif\n #\n # The value that results from evaluating an if expression is the\n # value of the last expression in the code that was executed, or\n # nil if no block of code is executed.\n #\n \n x = 3\n\n if x == 1\n name = \"one\"\n elsif x == 2\n name = \"two\"\n elsif x == 3\n name = \"three\"\n elsif x == 4\n name = \"four\"\n else\n name = \"many\"\n end\n\n puts name\n\n # here is another way to write the above if expression\n\n if x == 1\n name = \"one\"\n elsif x == 2\n name = \"two\"\n elsif x == 3 then name = \"three\"\n elsif x == 4 ; name = \"four\"\n else\n name = \"many\"\n end\n\n puts name\n\n # here is another way to write the above if expression\n\n name = if x == 1 then \"one\"\n elsif x == 2 then \"two\"\n elsif x == 3 then \"three\"\n elsif x == 4 then \"four\"\n else \"many\"\n end\n\n puts name\n\n # for single-line conditionals: use \"if\" as a modifier\n y = x.odd? if x.respond_to?(:odd?)\n puts y\n \n #\n # unless is the opposite of if. unless executes code only if associated\n # expression evaluates to false or nil. elsif clauses are not allowed.\n #\n x = 10\n unless x == 0\n puts \"x is not 0\"\n else\n puts \"x is 0\"\n end\n \n # for single-line conditionals: use \"unless\" as a modifier\n s = x.to_s unless x.nil?\n puts s\n \n #\n # case statement is a multiway conditional. Like if, case returns a value\n #\n x = 5\n case x\n when 1\n name = \"one\"\n when 2\n name = \"two\"\n when 3\n name = \"three\"\n when 4\n name = \"four\"\n else # optional\n name = \"many\"\n end\n puts name\n \n # since case returns a value, here is another way to write the above case\n name = case x\n when 1\n \"one\"\n when 2\n \"two\"\n when 3\n \"three\"\n when 4\n \"four\"\n else \"many\"\n end\n puts name\n \n #\n # Ruby has three looping statements: while, until, and for/in.\n #\n x = 10\n while x >= 0 do\n puts x\n x -= 1\n end\n puts\n \n # above code using until\n x = 10\n until x < 0 do\n puts x\n x -= 1\n end\n puts\n \n # while as a modifier\n x = 0\n puts x = x + 1 while x < 10\n puts\n \n # until as a modifier\n x = 0\n puts x = x + 1 until x == 10\n puts\n \n #\n # for/in loop is used to iterate through the elements of an enumerable object\n #\n for i in 1..10 do\n print i, \" \"\n end\n print \"\\n\\n\"\n \n # print elements in an array\n a = [5, 10, 15, 20, 25]\n for e in a do\n print e, \" \"\n end\n print \"\\n\\n\"\n \n # print keys and values in a hash\n h = {:a => 1, :b => 2, :c => 3}\n for key,value in h\n puts \"#{key} => #{value}\"\n end\n puts\n \nend",
"def truth\n\t\t\t\"You can't handle the truth\" ; true\n\t\tend",
"def success?; terminal_flag == :success end",
"def success?; terminal_flag == :success end",
"def else_statement\n if @enum.peek.value == 'else'\n @instruction.push('state_'+(@if_count+1).to_s+\":\\n\")\n match(Token.new(:reserved, 'else'))\n block_statements\n end\n end",
"def complete?; end",
"def some_method(x)\n if x > 5 && x < 10\n return :a\n elsif x < 5\n return :b\n end # else??\n \n :c\nend",
"def func1 val\n if val = 1 #should be ==\n return true #needs an indent\n else\n return false#needs an indent\n end\nend",
"def block?; end",
"def missed?; end",
"def suivre; end"
] | [
"0.7320614",
"0.7320614",
"0.7181289",
"0.6968806",
"0.6968806",
"0.6968806",
"0.68913114",
"0.6626747",
"0.66117996",
"0.6603368",
"0.6441278",
"0.6436673",
"0.6352818",
"0.631207",
"0.62904346",
"0.62904346",
"0.6248565",
"0.6234932",
"0.6142858",
"0.6104267",
"0.6084472",
"0.6084472",
"0.6084472",
"0.6068512",
"0.6062234",
"0.60063255",
"0.5992915",
"0.5970504",
"0.59534395",
"0.5937429",
"0.5937429",
"0.5934524",
"0.59293777",
"0.5925863",
"0.5924753",
"0.5924753",
"0.5924753",
"0.58986306",
"0.58860666",
"0.58860666",
"0.5870133",
"0.58353263",
"0.58351564",
"0.58177435",
"0.5817392",
"0.58112055",
"0.58112055",
"0.58112055",
"0.58112055",
"0.58112055",
"0.58112055",
"0.58026254",
"0.57895947",
"0.578941",
"0.578941",
"0.578941",
"0.578941",
"0.578941",
"0.578941",
"0.578941",
"0.5776206",
"0.57586515",
"0.5758001",
"0.5757397",
"0.57372195",
"0.57372195",
"0.57358736",
"0.5714598",
"0.5711998",
"0.5711998",
"0.57035214",
"0.5699937",
"0.5697504",
"0.5693198",
"0.5689261",
"0.5686177",
"0.5681897",
"0.56729853",
"0.56519437",
"0.56519437",
"0.564976",
"0.56462485",
"0.5637291",
"0.563701",
"0.56357354",
"0.5632801",
"0.5632801",
"0.5629402",
"0.56168604",
"0.5613425",
"0.560689",
"0.55859697",
"0.5580451",
"0.5580451",
"0.55791175",
"0.55751985",
"0.5571347",
"0.55630374",
"0.55588186",
"0.55571145",
"0.554759"
] | 0.0 | -1 |
downloads csv of all research data submitted to users computer | def get_rex
if Project.exists?
@grant_data = Project.all_csv
respond_to do |format|
format.html
format.csv do
send_data @grant_data, filename: "research_data#{Time.now.to_s(:db)}.csv"
end
# format.CSV {render csv: @grant_data.to_csv}
# format.xls {render text: @grant_data.to_csv(col_sep: "\t")}
end
else
flash[:success] = "Research Expenditure Table is Empty"
redirect_to :controller => 'amrc_reports', :action => 'reports'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def index\n @search = Claim.order('claims.created_at desc').search(params[:q])\n @claims = @search.result(:distinct => true).paginate(:per_page => 50, :page => params[:page])\n respond_to do |format|\n format.html{}\n format.csv {\n send_data generate_csv, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=claims_list.csv\" \n }\n end\n end",
"def index\n @user_studies = UserStudy.all\n \n respond_to do |format|\n format.html\n format.csv { send_data @user_studies.to_csv }\n end\n end",
"def do_csv_search(params, download)\n s = do_search(params.merge({:limit => self.count, :offset => 0}))\n \n # any possible 'speed up' would need to be done here:\n results = s.results.map do |obj|\n obj.search_result_format\n end\n\n headers = results.first.keys\n filename = download.filename\n user = download.user\n id = download.id\n path = \"tmp/#{id}_#{user}_#{filename}\"\n \n csv_file = CSV.open(path, \"wb\") do |csv|\n csv << headers\n results.each do |r|\n csv << r.values \n end\n end\n\n Zip::File.open(\"#{path}.zip\", Zip::File::CREATE) do |zipfile|\n zipfile.add(filename, path)\n end\n\n File.delete(path) if File.exist?(path)\n\n download.update({status: 1, filename: \"#{filename}.zip\"})\n #download.created_by.notify(\"Your download '#{download.filename}' is ready.\")\n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def index\n @cautelas = Cautela.search(params[:search], params[:page])\n respond_to do |format|\n format.html\n #format.csv { send_data(@cautelas.to_csv) }\n format.csv {\n filename = \"CEPI_Cautelas-#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\"\n send_data(@cautelas.to_csv, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename)\n }\n end\n end",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def get_unsubmitted\n @grant_data = User.to_csv_unsubmitted\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"unsubmitted_charity_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def index\n @trainers = Trainer.paginate(:page => params[:page]).order(email_dirigeant: :desc, crawled_for_email: :desc)\n respond_to do |format|\n format.html\n format.csv { send_data Trainer.all.to_csv}\n end\n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def export(organization_name)\n CSV.open(\"/tmp/#{organization_name}.csv\", \"w\") do |csv|\n csv << [\"Name\", \"Upload Date\", \"Steps\", \"Aerobic Steps\", \"Calories\", \"Miles\", \"Device Serial\", \"Input Method\"]\n self.uploads.includes(:person).except(:order).find_each(batch_size: 6000) do |upload|\n if upload.is_device_input == 1\n input_method = \"Synced from Device\"\n else\n input_method = \"Manually Uploaded\"\n end\n csv << [\"#{upload.person.first_name} #{upload.person.last_name}\", upload.date.try(:strftime, \"%b %d %Y\"), upload.total_steps, upload.aerobic_steps, upload.calories, ('%.2f' % upload.distance), upload.device_serial, input_method]\n end\n end\n end",
"def save_results\n\t\t# save search results\n\t\tCSV.open('results.csv', 'wb') {|csv|\n\t\t\t@checked_links.each {|link|\n\t\t\t\tcsv << [link[0], link[1][:res], link[1][:time]]\n\t\t\t}\n\t\t}\n\t\t# save list of external links\n\t\tCSV.open('external-links.csv', 'wb') {|csv|\n\t\t\t@external_links.each do |link|\n\t\t\t csv << [link[0], link[1][:res], link[1][:time]]\n\t\t\tend\n\t\t}\n\t\t# save list of invalid links\n\t\tCSV.open('invalid.csv', 'wb') {|csv|\n\t\t\t@error_links.each do |link|\n\t\t\t csv << link\n\t\t\tend\n\t\t}\n\tend",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def index\n @pagetitle = \"Suppliers\"\n \n @companies = Company.all\n\n @path = 'suppliers'\n\n @suppliercsv = Supplier.all \n respond_to do |format|\n format.html\n format.csv { send_data @suppliercsv.to_csv }\n \n end\n\n\n end",
"def download\n @posts = PostService.getAllPosts\n respond_to do |format|\n format.html\n format.csv { send_data @posts.to_csv, :filename => \"Post List.csv\" }\n end\n end",
"def index\n @surveys = Survey.all\n\n respond_to do |format|\n format.html\n #format.csv { send_data @surveys.to_csv }\n end\n end",
"def index\n @firmagideris = Firmagideri.search(params[:search]).order(\"created_at DESC\").page params[:page]\n\n\n respond_to do |format|\n format.html\n format.csv { send_data @firmagideris.all.to_csv }\n format.xls\n end\n\n end",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def index\n @initial_surveys = InitialSurvey.all\n respond_to do |format|\n format.html { render :action => \"index\" }\n format.csv { send_data InitialSurvey.to_csv(@initial_surveys) }\n end\n end",
"def download\n \n @searches = Array.new\n \n if request.get?\n if !params[:id].nil?\n params[:id].each do |value|\n s = Search.find(value.to_i)\n s.last_action = \"Downloaded\"\n s.last_action_date = Time.now\n s.save\n @searches << Search.find(value.to_i)\n end\n else\n redirect_to :back and return\n end\n end\n \n download = \"\" \n \n if params[:download_format] == \"txt\"\n \n @searches.each do |s|\n \n download << \"Date: \" + (l s.created_at, format: :long) + \"\\n\"\n download << \"Search: \" + s.query_params['q'] + \"\\n\"\n download << \"Results: \" + s.numfound.to_s + \"\\n\"\n download << \"Notes: \" + s.notes + \"\\n\"\n download << \"URL: http://#{request.host_with_port}/catalog?\" + (s.query_params.except(:action, :controller, :only_path, :saved)).to_query + \"\\n\"\n download << \"Saved: \" + (!s.user_id.nil?).to_s+ \"\\n\\n\"\n end\n \n elsif params[:download_format] == \"csv\"\n \n download << \"Date,Search,Results,Notes,URL,Saved\\n\"\n @searches.each do |s|\n download << \"\\\"\" + (l s.created_at, format: :long) + \"\\\",\"\n download << s.query_params['q'] + \",\"\n download << s.numfound.to_s + \",\"\n download << s.notes + \",\"\n download << \"http://#{request.host_with_port}/catalog?\" + (s.query_params.except(:action, :controller, :only_path, :saved)).to_query + \",\"\n download << (!s.user_id.nil?).to_s + \"\\n\"\n end\n \n else\n download << \"Date\\tSearch\\tResults\\tNotes\\tURL\\tSaved\\n\"\n @searches.each do |s|\n download << \"\\\"\" + (l s.created_at, format: :long) + \"\\\"\\t\"\n download << s.query_params['q'] + \"\\t\"\n download << s.numfound.to_s + \"\\t\"\n download << s.notes + \"\\t\"\n download << \"http://#{request.host_with_port}/catalog?\" + (s.query_params.except(:action, :controller, :only_path, :saved)).to_query + \"\\t\"\n download << (!s.user_id.nil?).to_s + \"\\n\"\n end\n \n end\n \n send_data(download, :filename => 'searches.' + params[:download_format])\n return\n \n end",
"def to_csv()\n all = general_info_csv() + \"\\r\\n\" +\n teacher_for_csv() + \"\\r\\n\" +\n contributor_to_csv() + \"\\r\\n\" +\n published_in_csv() + \"\\r\\n\" +\n text_fields_csv()\n all\n end",
"def index\n @bestthesisawards = Bestthesisaward.all\n respond_to do |format|\n format.html\n format.csv { send_data @bestthesisawards.to_csv, \n filename: \"bestthesisawards-#{Date.today}.csv\" }\n end\n end",
"def download_handoff_report\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure_new(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv_new(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report-New.csv'\n\n \tend",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def get_individual\n individual = User.find(params[:charity_id])\n if Project.exists?(userID: params[:charity_id])\n csv_data = Project.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_research_expenditure_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Research Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'research_individual'\n end\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def index\n @conditions = Condition.all.order(created_at: :desc)\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"condition-list.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end \n end",
"def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def index\n @students = Student.all\n respond_to do |format|\n format.html\n format.csv { send_data @students.to_csv }\n end\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def index\n @search = School.with_contacts.search(params[:q])\n @order = (params[:q] && params[:q][:s]) ? params[:q][:s] : 'name asc'\n @schools = @search.result.order(@order).page(params[:page]).per(25)\n # @export_headers = params[:export]\n\n respond_to do |format|\n format.html\n format.csv { send_data @schools.to_csv }\n format.xls { send_data @schools.to_csv(col_sep: \"\\t\") }\n end\n end",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def index\n @smallsurveys = Smallsurvey.all\n respond_to do |format|\n format.html\n format.csv { send_data @smallsurveys.to_csv }\n end\n end",
"def index\n @colaboradores = Colaboradore.all\n\n respond_to do |format|\n format.html\n format.csv {render text: @colaboradores.to_csv }\n end\n\n if params[:search] \n @colaboradores = Colaboradore.search(params[:search])\n else\n @colaboradores = Colaboradore.all\n end\n authorize Colaboradore\n end",
"def save_to_csv(jobs)\n CSV.open('../docs/cryptocurrencyjobslist.csv', 'wb') do |csv|\n csv << [\"Company\", \"Title\", \"Link\", \"Location\", \"Category\"]\n for i in 0..jobs.length-1\n csv << [jobs[i].company, jobs[i].title, jobs[i].listing_url, jobs[i].location, jobs[i].category]\n end\n end\nend",
"def index\n @empire_master_no_matches = EmpireMasterNoMatch.all\n\n if params['path'] == 'ny_export_empire_master_no_matches'\n respond_to do |format|\n format.html\n format.csv { send_data @empire_master_no_matches.where(source: 'NY').where(double: false).to_csv, filename: \"ny-empire-master-no-matches-#{Date.today}.csv\" }\n end\n end\n if params['path'] == 'ny_export_empire_master_doubles'\n respond_to do |format|\n format.html\n format.csv { send_data @empire_master_no_matches.where(source: 'NY').where(double: true).to_csv, filename: \"ny-empire-master-doubles-#{Date.today}.csv\" }\n end\n end\n\n # Remove all records before new list import\n if params['remove_all'] == 'yes' && params['confirm'] == 'yes'\n EmpireMasterNoMatch.delete_all\n redirect_to empire_master_no_matches_path(), note: 'Records Deleted'\n end\n\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def index\n @companies = Company.all\n @top_billing = Company.ordered_by_current_bill\n\n @import = Company::Import.new\n # authorize @companies\n skip_authorization\n respond_to do |format|\n format.html\n format.csv { send_data @companies.to_csv, filename: \"companies-export-#{Time.now}-inclustaff.csv\" }\n \tend\n\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def csv_for_company\n filter = params[:filter]\n company_name = Company.find(params[:company_id]).name\n csv_name = filter.present? ?\n \"#{company_name}_operations_filtered_by_#{filter}.csv\" :\n \"#{company_name}_operations.csv\"\n respond_to do |format|\n format.csv { send_data to_csv ,filename: csv_name}\n end\n end",
"def index\n # @books = Book.all.order(id: \"DESC\")\n\n respond_to do |format|\n format.html\n format.csv {send_data @books.generate_csv, filename: \"book_on_rails-#{Time.zone.now.strftime('%Y%m%d%S')}.csv\"}\n end\n end",
"def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend",
"def export_data\r\n folder = \"\\data\"\r\n FileUtils.mkdir_p folder\r\n CSV.open(File.join(folder, @output_stats_name), 'wb') do |csv|\r\n csv << @global_bests\r\n csv << @average_global_bests\r\n @neighbourhoods_list[0].report_particles.each do |x|\r\n csv << x\r\n end\r\n end\r\n end",
"def index\n @trial_sites = TrialSite.all\n respond_to do |format|\n format.html\n format.csv { send_data @trial_sites.to_csv }\n format.xls { send_data @trial_sites.to_csv(col_sep: \"\\t\") }\n end\n end",
"def save_as_csv\n h = get_townhall_urls\n CSV.open('db/emails.csv', 'wb', write_headers: true, headers: ['Mairies du 95', 'Email']) do |csv|\n h.to_a.each { |elem| csv << elem }\n end\n puts \"\\nTon fichier csv est prêt\\n\\n\"\n Index.new.index\n end",
"def index\n @study_spots = StudySpot.all\n\n respond_to do |format|\n format.html\n format.csv { send_data UsageTime.all.to_csv }\n end\n end",
"def index\n @backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @backend_tutorial_stats }\n format.csv do\n backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n data = Backend::TutorialStat.download_tutorial_stats_csv(backend_tutorial_stats) \n filename = \"tutorial_stats#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\" \n send_data(data, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename) \n end\n end \n end",
"def index\n if params[:csv]\n # generate the header line\n csv_string = 'Campers , Address ,'\n if @option.use_2nd_address?\n\tcsv_string << 'addr2,'\n end\n csv_string << 'City, State, Mail code,'\n if @option.use_country? && Country.active.count > 0\n\tcsv_string << 'Country,'\n end\n case @option.no_phones\n\twhen 1\n\t csv_string << 'Phone, '\n\twhen 2\n\t csv_string << 'Phone, 2nd Phone, '\n end\n csv_string << 'email address, last activity'+\"\\n\"\n # now for the data\n Camper.all.each do |c|\n\tcsv_string << c.full_name + ',' + c.address + ','\n\tcsv_string << (c.address2 ? c.address2 : '') + ',' if @option.use_2nd_address?\n\tcsv_string << c.city + ',' + c.state + ',' + c.mail_code + ','\n\tif @option.use_country? && Country.active.count > 0\n\t if c.country_id?\n\t csv_string << (c.country.name? ? c.country.name : '') + ','\n\t else\n\t csv_string << ','\n\t end\n\tend\n\tcsv_string << (c.phone ? c.phone : '' ) + ',' if @option.no_phones > 0\n\tcsv_string << (c.phone_2 ? c.phone_2 : '' ) + ',' if @option.no_phones > 1\n\tcsv_string << (c.email ? c.email : '' ) + ',' + c.activity.to_s + \"\\n\"\n end\n # debug csv_string\n send_data(csv_string,\n\t\t:type => 'text/csv;charset=iso-8859-1;header=present',\n\t\t:disposition => 'attachment; filename=Campers.csv') if csv_string.length\n else\n @page_title = \"Camper Report\"\n @campers = Camper.all\n end\n end",
"def export_issues_csv\r\n sort_init 'issues.id', 'desc'\r\n sort_update\r\n\r\n search_filter_init_list_issues\r\n\t\t\t\t\t\r\n @issues = Issue.find :all, :order => sort_clause,\r\n\t\t\t\t\t\t:include => [ :author, :status, :tracker, :project ],\r\n\t\t\t\t\t\t:conditions => search_filter_clause\t\t\t\t\t\t\t\r\n\r\n export = StringIO.new\r\n CSV::Writer.generate(export, ',') do |csv|\r\n csv << %w(Id Status Tracker Subject Author Created Updated)\r\n @issues.each do |issue|\r\n csv << [issue.id, issue.status.name, issue.tracker.name, issue.subject, issue.author.display_name, l_datetime(issue.created_on), l_datetime(issue.updated_on)]\r\n end\r\n end\r\n export.rewind\r\n send_data(export.read,\r\n :type => 'text/csv; charset=utf-8; header=present',\r\n :filename => 'export.csv')\r\n end",
"def index\n authorize! :index, Walkathon::Pledge.new\n @walkathon_pledges = Walkathon::Pledge.includes(:student)\n if params[:student_id]\n @walkathon_pledges = @walkathon_pledges.where(student_id: params[:student_id])\n @student = Student.find(params[:student_id])\n end\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"pledges.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n end",
"def index\n #@employees = Employee.all #created by rails scaffold\n if params[:search]\n @employees = Employee.search(params[:search]).order(\"lastname ASC\")\n else\n @employees = Employee.all.order(\"lastname ASC\")\n end\n\n respond_to do |format|\n format.html\n format.csv { render text: @employees.to_csv }\n #to create the csv file for download\n end\n end",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def welldcsv\n @payment_run = PaymentRun.find(params[:id])\n \n paydates = ''\n paydates = @payment_run.payment_date.to_s(:local) unless @payment_run.payment_date.blank?\n \n send_data(@payment_run.welld_payment_file,:type => 'text/plain', :filename => \"payments-RUN#{@payment_run.id}-#{paydates}.csv\" )\n end",
"def get_grant_data\n if GrantsData.exists?\n @grant_data = GrantsData.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_grants_data_#{Time.now.to_s(:db)}.csv\"\n end\n end\n else\n flash[:notice] = 'Grant Data Table Empty'\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def download(county)\n puts \" F95BA #{county}\"\n results = @client.find :all, {\n class: '1', # 1 Residential\n query: \"(246=|A),(61=|#{county})\", #246 ListingStatus\n #A ActiveAvailable\n #61 County\n select: '157,881,10,922,924,137,261,129,246,80,61,25,1424,102,214,314,96,1,131,1329,sysid', \n search_type: 'Property'\n }\n puts \"F95BA #{results.size} listings\"\n #puts \"F95BA saving\"\n pg_save(results)\n results\n end",
"def index\n @search=UrunSearch.new(params[:search])\n @uruns=@search.scope.order(\"created_at DESC\").page params[:page]\n\n @search=SutSearch.new(params[:search])\n @suts=@search.scope\n\n\n\n respond_to do |format|\n format.html\n format.csv { send_data @uruns.to_csv }\n format.xls\n end\n end",
"def index\n #@tenacities = Tenacity.all\n @search = Tenacity.search(params[:q])\n @tenacities = @search.result\n @search.build_condition \n \n respond_to do |format|\n format.html\n format.csv { send_data @tenacities.to_csv }\n format.xls\n end \n end",
"def index\n @localbrs = Localbr.all\n respond_to do |format|\n format.csv { send_data @localbrs.to_csv}\n format.html\n end\n end",
"def index\n\t\t@users= User.all\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.csv { send_data Importer.generate(:csv, @users), filename: \"users-#{Date.today}.csv\" }\n\t\tend\n\n\tend",
"def index\n # @articles = Article.includes(:criminal_code)\n @articles = Article.with_current_and_all_prisoner_count\n\n respond_to do |format|\n format.html\n format.json\n format.csv do\n send_data Article.to_csv,\n filename: GeneratedFile.clean_filename('articles', 'csv'),\n type: 'text/csv'\n end\n end\n end",
"def export(params={})\n columns = delimited_string_to_array(Settings.export.travel_fields)\n send_data Travel.export(columns), :filename => \"travel.csv\"\n end",
"def exported_report\n #The folder where the filename points to, is actually in the ~/rails/Forester because of capistrano as\n # the Apache point to ~/rails/Forester/current symlinkfolder and capistrano updates the them. \n @filename = \"quarterly_report_#{params[:year]}_#{params[:quarter]}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n if params[:quarter] == \"1\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{(params[:year].to_i-1)}-12-31' AND delivery_date<'#{params[:year]}-04-01'\")\n else\n if params[:quarter] == \"2\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-03-31' AND delivery_date<'#{params[:year]}-07-01'\")\n else\n if params[:quarter] == \"3\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-06-30' AND delivery_date<'#{params[:year]}-10-01'\")\n else\n if params[:quarter] == \"4\" then\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-09-30' AND delivery_date<'#{(params[:year].to_i+1)}-01-01'\")\n end\n end\n end\n end\n \n #Writing to file starts with empty line.\n File.open(\"#{@file_path}#{@filename}\", 'w') do |writer|\n writer.puts(\"\\n\")\n end\n \n #From the tickets delivered in the given quarter, the job ids are gathered here\n @job_ids = @tickets.collect {|i| i.job_id }\n @jobs = Job.find(@job_ids)\n \n #To have less DB calls, all specie records are put into an instance variable\n @species = Specie.all\n \n #Goes through all the jobs, for each sums up all the mbf and tonnages and writes them into the file\n # per specie.\n @jobs.each do |k|\n @my_tickets = []\n @tickets.each do |l|\n if l.job_id == k.id\n @my_tickets.push(l)\n end\n end\n \n @amounts = []\n \n @species.length.times do\n @amounts.push([0, 0])\n end\n \n @total_pulp = 0\n \n @my_tickets.each do |i|\n i.load_details.each do |j|\n if i.wood_type == 3 || j.species_id == 0 #wood type 3 & species_id 0 == pulp\n @total_pulp = @total_pulp + j.tonnage\n next #If load is pulp, it has only one load detail so program jups to next loop\n end\n #Amounts of mbf/tonnage are summed up here per ticket according to their specie.\n @amounts[j.species_id-1][0] = @amounts[j.species_id-1][0] + j.mbfss #This and triple-n tonnage in next are helper methods. See their documentation.\n @amounts[j.species_id-1][1] = @amounts[j.species_id-1][1] + j.tonnnage\n end\n end\n \n #Finally, the values calculated above are written into the file.\n File.open(\"#{@file_path}#{@filename}\", 'a') do |writer|\n writer.puts \"Job, #{k.name}\"\n writer.puts \"Category, MBF, Tonnage\"\n @species.each do |i|\n writer.puts \"#{i.code}, #{round_to(@amounts[i.id-1][0].to_f, 2)}, #{round_to(@amounts[i.id-1][1].to_f, 2)}\"\n end\n writer.puts \"Pulp, ,#{round_to(@total_pulp.to_f, 2)}\"\n writer.puts(\"\\n\")\n end\n end\n \n #The file created is opened in 'r' (== read) mode and send to user\n @file = File.open(\"#{@file_path}#{@filename}\", 'r')\n \n send_data(@file.read, :type => \"csv\", :filename => @filename)\n end",
"def csv_data\n sql = Utils.setup_basic_filters(SqlAssembler.new, @params.merge('site_id' => @site[:id]))\n sql.set_select 'SELECT package_name, success, title, digitool_id, islandora_pid, content_model, time_started, time_finished, bytes_ingested FROM islandora_packages'\n sql.set_order 'ORDER BY id DESC'\n return sql.execute\n end",
"def index\n tasks = Task.where(user_id: session[:user_id])\n @tasks = tasks.sort_by(&:created_at).reverse\n\n #binding.pry\n\n respond_to do |format|\n format.html\n format.csv { send_data as_csv(@tasks), filename: \"tasks-#{Date.today}.csv\" }\n end\n end",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def data_input\n @students.each do |obj|\n File.open(\"students_directory.csv\", \"a\") { |file| file.puts \"#{obj[:name]}, #{obj[:age]}, #{obj[:gender]}\" }\n end\nend",
"def save_as_csv\n result = get_result_all_city()\n begin\n CSV.open(\"db/city.csv\", \"wb\") do |csv|\n result.each do |city|\n convert = city.to_a\n csv << convert[0]\n end\n end\n rescue => exception\n puts \"Some errors have orcurred!\"\n end\n end",
"def download_handoff_report_old\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report.csv'\n\n \tend",
"def download_all\n if platinum_user_and_above?\n @domains=Domain.where(\"name is not null\")\n template = Rails.root.join(\"app\",\"views\",\"reports\", \"DomainPortfolio-template.xlsx\")\n workbook = RubyXL::Parser.parse(template)\n worksheet = workbook.worksheets[0]\n worksheet.sheet_name = 'All'\n index = 0\n @domains.each do |domain|\n next if domain.name.nil?\n next if domain.name.empty?\n index += 1\n if domain.transferable\n my_row = [domain.name, \"yes\"]\n else\n my_row = [domain.name, \"no\"]\n end\n worksheet_write_row(worksheet,index, my_row)\n end\n file = \"DomainPortfolio-All-\" + Time.now.strftime('%m%d%Y') + \".xlsx\"\n send_data workbook.stream.string, filename: file, disposition: 'attachment'\n else\n redirect_back :fallback_location => root_path, :alert => \"Access denied.\"\n end\n end",
"def index\n @tools = Tool.page(params[:page]).per(5)\n respond_to do |format|\n format.html\n format.csv { send_data @tools.to_csv}\n end\n end",
"def generateCSV()\n findCommits\n findLinesOfCode\n\n CSV.open(\"data.csv\", \"wb\") {|csv| @students.to_a.each {|elem| csv << elem} }\nend",
"def index\n authorize! :view, Measurement\n\n @measurements = Measurement.all\n\n @sites = Site.all\n @instruments = Instrument.all\n \n respond_to do |format|\n format.html\n format.csv { send_data @measurements.to_csv }\n end\n end",
"def index\n # Find all participants that match search criteria.\n @participants = Participant.search(params[:search])\n # Export all member's information to a csv file.\n @exports = Participant.all\n respond_to do |format|\n format.html\n format.csv { send_data @exports.to_csv }\n end\n end",
"def export_students_to_csv\n CSV.generate(headers: true) do |csv|\n csv << %w[perm email first_name last_name github_username]\n\n roster_students.each do |user|\n csv << [\n user.perm,\n user.email,\n user.first_name,\n user.last_name,\n user.username\n ]\n end\n end\n end",
"def index\n @employees = Employee.all.select(:id, :first_name, :last_name, :email, :phone, :salary, :area)\n #@import = Employee::Import.new \n respond_to do |format|\n format.html\n format.csv { send_data @employees.to_csv, filename: \"employees-#{Date.today}.csv\" }\n end\n end",
"def index\n @scrap_url = 'https://abilene.craigslist.org/search/hhh?min_bedrooms=3&max_bedrooms=3&max_bathrooms=2&min_bathrooms=2'\n @homes = Home.all\n\n respond_to do |format|\n format.html\n format.csv { render json: @homes.to_csv, layout: false }\n end\n end"
] | [
"0.7011126",
"0.69842565",
"0.686099",
"0.678201",
"0.66817033",
"0.66473687",
"0.65763",
"0.657369",
"0.65720564",
"0.6526086",
"0.6525991",
"0.65042615",
"0.6411234",
"0.6344524",
"0.6336109",
"0.6324183",
"0.62556845",
"0.62349075",
"0.6234365",
"0.6226747",
"0.621516",
"0.6196099",
"0.6191183",
"0.6184417",
"0.6179744",
"0.6153665",
"0.6137377",
"0.6124921",
"0.61215746",
"0.6108187",
"0.61076844",
"0.6099587",
"0.60871273",
"0.60839665",
"0.60763013",
"0.60738164",
"0.6072475",
"0.6072258",
"0.6062515",
"0.60622746",
"0.60612994",
"0.60384285",
"0.6020984",
"0.6018525",
"0.6010641",
"0.6010261",
"0.60088575",
"0.6007568",
"0.6005309",
"0.60050493",
"0.60007316",
"0.5989798",
"0.59826636",
"0.5981381",
"0.5963213",
"0.5963053",
"0.5961639",
"0.5958799",
"0.59577525",
"0.59409493",
"0.59314805",
"0.5921875",
"0.5916828",
"0.591299",
"0.59002495",
"0.58948106",
"0.5882749",
"0.5881687",
"0.58792275",
"0.5874496",
"0.5873075",
"0.5858529",
"0.58531",
"0.58486915",
"0.58462894",
"0.58141184",
"0.581002",
"0.58095914",
"0.5809224",
"0.5804178",
"0.5801084",
"0.5801058",
"0.5793707",
"0.57882047",
"0.57875144",
"0.5776701",
"0.57753175",
"0.5771106",
"0.57681865",
"0.5767966",
"0.5765476",
"0.57628226",
"0.5754308",
"0.5750528",
"0.5748741",
"0.57472974",
"0.5745919",
"0.5744291",
"0.57442003",
"0.574295"
] | 0.6199916 | 21 |
downloads csv of all unsubmitted charity information to users computer | def get_unsubmitted
@grant_data = User.to_csv_unsubmitted
respond_to do |format|
format.html
format.csv do
send_data @grant_data, filename: "unsubmitted_charity_data#{Time.now.to_s(:db)}.csv"
end
# format.CSV {render csv: @grant_data.to_csv}
# format.xls {render text: @grant_data.to_csv(col_sep: "\t")}
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def index\n if params[:csv]\n # generate the header line\n csv_string = 'Campers , Address ,'\n if @option.use_2nd_address?\n\tcsv_string << 'addr2,'\n end\n csv_string << 'City, State, Mail code,'\n if @option.use_country? && Country.active.count > 0\n\tcsv_string << 'Country,'\n end\n case @option.no_phones\n\twhen 1\n\t csv_string << 'Phone, '\n\twhen 2\n\t csv_string << 'Phone, 2nd Phone, '\n end\n csv_string << 'email address, last activity'+\"\\n\"\n # now for the data\n Camper.all.each do |c|\n\tcsv_string << c.full_name + ',' + c.address + ','\n\tcsv_string << (c.address2 ? c.address2 : '') + ',' if @option.use_2nd_address?\n\tcsv_string << c.city + ',' + c.state + ',' + c.mail_code + ','\n\tif @option.use_country? && Country.active.count > 0\n\t if c.country_id?\n\t csv_string << (c.country.name? ? c.country.name : '') + ','\n\t else\n\t csv_string << ','\n\t end\n\tend\n\tcsv_string << (c.phone ? c.phone : '' ) + ',' if @option.no_phones > 0\n\tcsv_string << (c.phone_2 ? c.phone_2 : '' ) + ',' if @option.no_phones > 1\n\tcsv_string << (c.email ? c.email : '' ) + ',' + c.activity.to_s + \"\\n\"\n end\n # debug csv_string\n send_data(csv_string,\n\t\t:type => 'text/csv;charset=iso-8859-1;header=present',\n\t\t:disposition => 'attachment; filename=Campers.csv') if csv_string.length\n else\n @page_title = \"Camper Report\"\n @campers = Camper.all\n end\n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def save_as_csv\n result = get_result_all_city()\n begin\n CSV.open(\"db/city.csv\", \"wb\") do |csv|\n result.each do |city|\n convert = city.to_a\n csv << convert[0]\n end\n end\n rescue => exception\n puts \"Some errors have orcurred!\"\n end\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def download_csv(csv_string, file_name)\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv;charset=utf-8;header=present'\n# send_data csv_string, :filename => file_name, :disposition => 'attachment', :type => 'text/csv;charset=utf-8;header=present'\n end",
"def save_csv\n @csv_content = params[:csv_content]\n if !@csv_content.nil? && !@csv_content.empty?\n #csv_content_windows1255 = Iconv.conv('windows-1255', 'utf-8', @csv_content)\n #send_data csv_content_windows1255,\n send_data @csv_content,\n :filename => \"#{Rails.cache.read('csv_content_type')}.csv\",\n :type => 'text/csv'\n #:type => 'text/csv; charset=windows-1255'\n else\n render :home\n end\n end",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def save_to_csv(players)\n CSV.open('../docs/collegeQBstats.csv', 'wb') do |csv|\n csv << [\"NAME\", \"POS\", \"CMP\", \"ATT\", \"CMP%\", \"YDS\", \"AVG\", \"LNG\", \"TD\", \"INT\", \"SACK\", \"RTG\"]\n for i in 0..players.length-1\n csv << [players[i].name, players[i].college, players[i].pos, players[i].cmp, players[i].att, players[i].cmp_pct, \n players[i].yds, players[i].lng, players[i].td, players[i].int, players[i].sack, players[i].rtg]\n end\n end\nend",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def to_csv()\n all = general_info_csv() + \"\\r\\n\" +\n teacher_for_csv() + \"\\r\\n\" +\n contributor_to_csv() + \"\\r\\n\" +\n published_in_csv() + \"\\r\\n\" +\n text_fields_csv()\n all\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def download_movie_info(title)\n # Fetch movie data from IMBD per the title entered.\n raw_movie_data = Imdb::Search.new(title).movies.first\n\n # Organize the fetched movie data into array\n array_movie_data = []\n array_movie_data << raw_movie_data.title << raw_movie_data.year << raw_movie_data.company << raw_movie_data.genres.join(\", \").to_s << raw_movie_data.length << raw_movie_data.director << raw_movie_data.mpaa_rating << raw_movie_data.tagline << raw_movie_data.poster << raw_movie_data.release_date\n\n # Save the array into 'movies.csv' file as pipe-separated data for later access\n f = File.new('movies.csv', 'a+')\n f.puts(array_movie_data.join(\"|\"))\n f.close\n return array_movie_data\nend",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def render_csv(csv, filename = nil)\n filename ||= params[:action]\n filename += '.csv'\n\n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\"\n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Expires'] = \"0\"\n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\"\n end\n\n render :layout => false, :text => csv\n end",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end",
"def show\n\t\t#@data = CSV.generate do |csv|\n\t\t#\t(1..2000).each_with_index do |user, index|\n\t\t#\t\tcsv << [\"MA_user\" + \"#{index+1}\" + \"@cb.com\", \"12345678\"]\n\t\t#\tend\n\t\t#end\n\t\t#send_data(@data, :type => 'text/csv; charset=utf-8; header=present', :filename => \"MA_user_2000.csv\")\n end",
"def download_issues\n import_job = ImportJob.find(params[:import_job_id])\n @output_csv_string = import_job.download_issues_csv_string\n respond_to do |format|\n format.csv do\n filename = \"#{File.basename(import_job.file_name, '.csv')}-import_issues_report-#{Time.now.strftime(\"%Y%m%d\")}.csv\"\n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\"\n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Expires'] = \"0\"\n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\"\n end\n end\n end\n end",
"def get_individual\n individual = User.find(params[:charity_id])\n if Project.exists?(userID: params[:charity_id])\n csv_data = Project.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_research_expenditure_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Research Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'research_individual'\n end\n end",
"def show\n\t@pcb = Pcb.find(params[:id])\n\trespond_to do |format|\n\t format.html\n\t format.csv { send_data Pcb.mob_csv(id: params[:id]), filename: \"pcb-bom-#{Date.today}.csv\" }\n\tend \n end",
"def download(county)\n puts \" F95BA #{county}\"\n results = @client.find :all, {\n class: '1', # 1 Residential\n query: \"(246=|A),(61=|#{county})\", #246 ListingStatus\n #A ActiveAvailable\n #61 County\n select: '157,881,10,922,924,137,261,129,246,80,61,25,1424,102,214,314,96,1,131,1329,sysid', \n search_type: 'Property'\n }\n puts \"F95BA #{results.size} listings\"\n #puts \"F95BA saving\"\n pg_save(results)\n results\n end",
"def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend",
"def export_issues_csv\r\n sort_init 'issues.id', 'desc'\r\n sort_update\r\n\r\n search_filter_init_list_issues\r\n\t\t\t\t\t\r\n @issues = Issue.find :all, :order => sort_clause,\r\n\t\t\t\t\t\t:include => [ :author, :status, :tracker, :project ],\r\n\t\t\t\t\t\t:conditions => search_filter_clause\t\t\t\t\t\t\t\r\n\r\n export = StringIO.new\r\n CSV::Writer.generate(export, ',') do |csv|\r\n csv << %w(Id Status Tracker Subject Author Created Updated)\r\n @issues.each do |issue|\r\n csv << [issue.id, issue.status.name, issue.tracker.name, issue.subject, issue.author.display_name, l_datetime(issue.created_on), l_datetime(issue.updated_on)]\r\n end\r\n end\r\n export.rewind\r\n send_data(export.read,\r\n :type => 'text/csv; charset=utf-8; header=present',\r\n :filename => 'export.csv')\r\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end",
"def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def save\n CSV.open(\"./db/gossip.csv\", \"ab\") do |csv|\n csv << [@author, @content]\n end\nend",
"def render_csv(filename = nil)\n filename ||= params[:action]\n filename += '.csv'\n\n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain; charset=iso-8859-1;\"\n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Expires'] = \"0\"\n else\n headers[\"Content-Type\"] ||= 'text/csv; charset=iso-8859-1;'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\"\n end\n\n render :layout => false\n end",
"def badges\n @competitors=Competitor.all\n respond_to do |format|\n format.csv do\n filename = \"competitor-badges-\" + Time.now.strftime(\"%m-%e-%Y\")\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n end",
"def generate_csv\n\n fields = @resource.typus_fields_for(:csv)\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = Rails.root.join(\"tmp\", \"export-#{@resource.to_resource}-#{Time.now.utc.to_s(:number)}.csv\")\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields.keys\n @resource.find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map do |key, value|\n case value\n when :transversal\n a, b = key.split(\".\")\n record.send(a).send(b)\n when :belongs_to\n record.send(key).to_label\n else\n record.send(key)\n end\n end\n end\n end\n end\n\n send_file filename\n\n end",
"def index\n @mobiles = Customer.pluck(:mobile)\n @agents = User.where(\"role = ?\", 2).pluck(:fname, :id)\n @franchises = User.where(\"role = ?\", 1).pluck(:fname, :id)\n @customers_csv = (policy_scope Customer).order(\"id desc\")\n @customers = (policy_scope Customer).order(\"id desc\").page params[:page]\n authorize Customer.new, :index?\n \n respond_to do |format|\n format.html\n format.csv { send_data @customers_csv.as_csv }\n end\n\n\n end",
"def export_archive\n if flash[:archive]\n if flash[:archive] == \"true\"\n result = Urlmaster.where(:archive => \"archive\").order(:archiveTime => :desc)\n nameArc = \"Archive\"\n elsif flash[:archive] == \"false\"\n result = Urlmaster.where(:archive => nil)\n nameArc = \"NotArchive\"\n end \n end\n # `result` variable contains list needed to export\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-\" + nameArc + \"-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n end",
"def export(organization_name)\n CSV.open(\"/tmp/#{organization_name}.csv\", \"w\") do |csv|\n csv << [\"Name\", \"Upload Date\", \"Steps\", \"Aerobic Steps\", \"Calories\", \"Miles\", \"Device Serial\", \"Input Method\"]\n self.uploads.includes(:person).except(:order).find_each(batch_size: 6000) do |upload|\n if upload.is_device_input == 1\n input_method = \"Synced from Device\"\n else\n input_method = \"Manually Uploaded\"\n end\n csv << [\"#{upload.person.first_name} #{upload.person.last_name}\", upload.date.try(:strftime, \"%b %d %Y\"), upload.total_steps, upload.aerobic_steps, upload.calories, ('%.2f' % upload.distance), upload.device_serial, input_method]\n end\n end\n end",
"def save_vix_future_data(year, month, directory, force_download = false)\n force_download = force_download || year > Today.year || (year == Today.year && month >= Today.month) # we want to re-download files for contracts that haven't expired yet\n \n month_code = MonthToMonthCode[month]\n year_suffix = year.to_s[-2..-1]\n file_name = \"CFE_#{month_code}#{year_suffix}_VX.csv\"\n file_path = File.join(directory, file_name)\n \n if File.exists?(file_path) && !force_download\n puts \"File #{file_path} already exists. Skipping.\"\n else\n url = \"http://cfe.cboe.com/Publish/ScheduledTask/MktData/datahouse/#{file_name}\"\n\n puts \"Downloading #{url}\"\n file_contents = open(url).read()\n File.open(file_path, 'w') { |file| file.write(file_contents) }\n end\n \n file_path\nrescue => e\n puts e.message\nend",
"def show\n respond_to do |format|\n format.csv do\n filename = \"descriptive-#{Druid.new(@cocina).without_namespace}.csv\"\n send_data create_csv, filename:\n end\n end\n end",
"def save_to_csv(jobs)\n CSV.open('../docs/cryptocurrencyjobslist.csv', 'wb') do |csv|\n csv << [\"Company\", \"Title\", \"Link\", \"Location\", \"Category\"]\n for i in 0..jobs.length-1\n csv << [jobs[i].company, jobs[i].title, jobs[i].listing_url, jobs[i].location, jobs[i].category]\n end\n end\nend",
"def generateCSV()\n findCommits\n findLinesOfCode\n\n CSV.open(\"data.csv\", \"wb\") {|csv| @students.to_a.each {|elem| csv << elem} }\nend",
"def index\n @criminal_codes = CriminalCode.includes(:translations).order(:name)\n\n respond_to do |format|\n format.html\n format.csv do\n send_data CriminalCode.to_csv,\n filename: GeneratedFile.clean_filename('criminal_codes', 'csv'),\n type: 'text/csv'\n end\n end\n end",
"def download_csv_file_most_seached_merchants\n if (params[:start_date]&¶ms[:end_date]).blank?\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.all.order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at\n writer << csv_value.map(&:inspect).join(', ')\n writer << \"\\n\"\n end\n end\n send_file(file_name)\n else\n start_date = Date.strptime(params[:start_date], \"%m/%d/%Y\")\n end_date = Date.strptime(params[:end_date], \"%m/%d/%Y\")\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant#{start_date}_to_#{end_date}.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.where(\"DATE(created_at) >= ? AND DATE(created_at) <= ?\", start_date, end_date).order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at, merchant.updated_at\n writer << csv_value\n writer << \"\\n\"\n end\n end\n \n send_file(file_name)\n end\n end",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def welldcsv\n @payment_run = PaymentRun.find(params[:id])\n \n paydates = ''\n paydates = @payment_run.payment_date.to_s(:local) unless @payment_run.payment_date.blank?\n \n send_data(@payment_run.welld_payment_file,:type => 'text/plain', :filename => \"payments-RUN#{@payment_run.id}-#{paydates}.csv\" )\n end",
"def initialize_csv\n CSV.open(\"results.csv\", \"wb\") do |csv|\n csv << [\"class\", \"title of course\", \"credits\"]\n end\nend",
"def index\n @mailing_list = MailingList.find(params[:mailing_list_id]) \n \n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @customers }\n format.csv { \n csv_file = Customer.to_csv(current_user.id)\n send_data csv_file, :type => 'text/csv', :disposition => 'attachment;\nfilename=output.csv'\n\n \n\n }\n end\n end",
"def save_as_csv\n h = get_townhall_urls\n CSV.open('db/emails.csv', 'wb', write_headers: true, headers: ['Mairies du 95', 'Email']) do |csv|\n h.to_a.each { |elem| csv << elem }\n end\n puts \"\\nTon fichier csv est prêt\\n\\n\"\n Index.new.index\n end",
"def render_csv(filename = nil)\n filename ||= params[:action]\n filename += '.csv'\n \n # String#index returns nil if no match is found\n if request.env['HTTP_USER_AGENT'].index(\"MSIE\")\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n end\n\n render :layout => false\n end",
"def export\n @fans = Fan.active.find(:all)\n\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Name\", \"E-mail\", \"Notes\", \"Section 1 - Description\", \"Section 1 - Email\", \"Section 1 - IM\", \"Section 1 - Phone\", \"Section 1 - Mobile\", \"Section 1 - Pager\", \"Section 1 - Fax\", \"Section 1 - Company\", \"Section 1 - Title\", \"Section 1 - Other\", \"Section 1 - Address\"]\n\n for fan in @fans\n csv << [fan.name,\n fan.email,\n \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\",\n fan.address \n ]\n end\n end\n\n # filename = @list.name.downcase.gsub(/[^0-9a-z]/, \"_\") + \".csv\"\n filename = \"fans.csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def download_problematic_rows\n import_job = ImportJob.find(params[:import_job_id])\n @output_csv_string = import_job.problem_rows_csv_string\n respond_to do |format|\n format.csv do\n filename = \"#{File.basename(import_job.file_name, '.csv')}-problematic_rows-#{Time.now.strftime(\"%Y%m%d\")}.csv\"\n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\"\n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Expires'] = \"0\"\n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\"\n end\n end\n end\n end",
"def generate_csv_file(csv_string,assessment_name)\n variable_time = Time.now.strftime(\"%Y%m%d\")\n #The file name is generated based on time.\n filename = assessment_name+variable_time+\".csv\"\n #invoke send_data of fastercsv gem to generate a csv\n send_data(csv_string, :type => \"text/plain\", :filename => filename)\n end",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def build_csv(file_name)\n\t\t\t\t\t\tarr = [\"Location URN\", \"Email List\"]\n\t\t\t\t\t\t\t\tCSV.open(file_name, \"wb\") do |csv|\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcsv << arr\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tend\nend",
"def export_ansokan_csv\n download_csv(AdminOnly::Reports::ShfApplicationsCsvReport.new, t('.success'), t('.error'))\n end",
"def store_creddentials\n CSV.open(ENV['HOME'] + '/creddentials.csv', 'w') do |csv|\n csv << [@email, @password]\n end\n read_creddentials\n end",
"def render_csv\n report = report_for_rendering\n filename = filename_timestamp(report.title)\n disable_client_cache\n send_data(report.to_csv, :filename => \"#{filename}.csv\")\n end",
"def index\n @cautelas = Cautela.search(params[:search], params[:page])\n respond_to do |format|\n format.html\n #format.csv { send_data(@cautelas.to_csv) }\n format.csv {\n filename = \"CEPI_Cautelas-#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\"\n send_data(@cautelas.to_csv, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename)\n }\n end\n end",
"def csv_for_company\n filter = params[:filter]\n company_name = Company.find(params[:company_id]).name\n csv_name = filter.present? ?\n \"#{company_name}_operations_filtered_by_#{filter}.csv\" :\n \"#{company_name}_operations.csv\"\n respond_to do |format|\n format.csv { send_data to_csv ,filename: csv_name}\n end\n end",
"def export\n @donor = Donor.order(:created_at)\n respond_to do |format|\n format.html\n format.csv { send_data @donor.as_csv, filename: \"Donors Export #{Date.today}.csv\" }\n end\n end",
"def gather_data # The commands that retrieve required data from the User\n puts 'This script takes in a 1 column csv (set your column header to course_id)'\n puts 'Enter the Bridge instance URL (e.g. https://ibennion.bridgeapp.com)'\n @url = gets.chomp! # Prompts user for desired Bridge domain\n puts 'Enter your Bridge API Key'\n @token = gets.chomp!\n puts 'These calls require you masquerade as an admin. What is the admin user ID?'\n @admin_id = gets.chomp! # The 'publish' endpoint requires you masquerade as an admin. Set the admin's User ID here.\n puts 'Enter the path to your CSV mapping file (e.g. /Users/ibennion/Documents/mapping.csv)'\n @csv_path = gets.chomp! # Set your path to the csv file. e.g. '/Users/ccromar/Downloads/sample.csv'\nend",
"def index\n @companies = Company.all\n @top_billing = Company.ordered_by_current_bill\n\n @import = Company::Import.new\n # authorize @companies\n skip_authorization\n respond_to do |format|\n format.html\n format.csv { send_data @companies.to_csv, filename: \"companies-export-#{Time.now}-inclustaff.csv\" }\n \tend\n\n end",
"def export_contacts_to_csv\n start_time = Time.now\n \n contacts = RESULT_COLL.find()\n \n FasterCSV.open(\"../output/healthcare_sg_export.csv\", 'w') {|csv|\n csv << [\"contact_name\", \"country\", \"auction_time\"]\n contacts.each do |row|\n csv << row[\"contact\"]\n end\n }\n p Time.now - start_time\nend",
"def fetch_csv\n convert_to_csv(fetch)\n end",
"def build_csv()\n\t\tconcat_path = \"#{@location}/#{@name}.csv\"\n\n\t\tCSV.open(concat_path, 'wb') do |csv_line|\n\t\t\t\t\t\n\t\t\theaders = ['movie_date', 'title', 'lifetime_gross_sales']\n\t\t\tcsv_line << headers\n\n\t\t\t@cage.each do |cage|\n\t\t\t\tcsv_line << [cage[:movie_date], cage[:title], cage[:life_time_gross]]\n\t\t\tend\n\t\tend\n\tend",
"def download_handoff_report\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure_new(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv_new(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report-New.csv'\n\n \tend",
"def get_individual_grant\n individual = User.find(params[:charity_id])\n if GrantsData.exists?(userID: params[:charity_id])\n csv_data = GrantsData.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_grants_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Grant Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'grants_individual'\n end\n end",
"def export(params={})\n columns = delimited_string_to_array(Settings.export.travel_fields)\n send_data Travel.export(columns), :filename => \"travel.csv\"\n end",
"def generateCSV data\n CSV.open(\"schools.csv\", \"wb\") do |csv|\n csv << [ \"school_name\", \"school_address\", \"school_type\", \"school_url\", \"school_area\"]\n data.each do |cell|\n csv << [ cell[\"school_name\"], cell[\"school_address\"], cell[\"school_type\"], cell[\"school_url\"], cell[\"school_area\"]]\n end\n end\nend",
"def get_rex\n if Project.exists?\n @grant_data = Project.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"research_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:success] = \"Research Expenditure Table is Empty\"\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def save_as_csv\n\t\tCSV.open(\"db/emails.csv\", \"w\") do |csv|\n\t\t\tcsv << [\"Ville\", \"Contact\"]\n\t\t\t@email_town.each_pair do |key, value|\n\t\t\tcsv << [key, value]\n\t\tend\n\t\tend\n\tend",
"def transactions\n @books=Book.all\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"transactions.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\nend",
"def save_csv\n CSV.open(@csv_file_path, \"wb\") do |csv|\n csv << [\"name\", \"description\", \"rating\", \"prep_time\", \"done\"]\n @recipes.each do |recipe|\n csv << [recipe.name, recipe.description, recipe.rating, recipe.prep_time, recipe.done?]\n end\n end\n end",
"def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end",
"def index\n @conditions = Condition.all.order(created_at: :desc)\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"condition-list.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end \n end",
"def save\n CSV.open(\"./db/gossip.csv\", \"ab\") do |csv|\n csv << [\"#{@author}\", \"#{@gossip}\"]\n\t\tend\n \tend",
"def save_students\n file = File.open(\"students.csv\", \"w\")\n @students.each do |student|\n student_data = [student[:name], student[:cohort], student[:hobby], student[:country]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend"
] | [
"0.6814283",
"0.6776089",
"0.6498901",
"0.64953804",
"0.64366996",
"0.64343286",
"0.6416474",
"0.6397308",
"0.63735765",
"0.63432944",
"0.6314342",
"0.6137285",
"0.6128341",
"0.61055076",
"0.60093373",
"0.6004062",
"0.59937435",
"0.59893876",
"0.59883535",
"0.5964434",
"0.59362364",
"0.58889127",
"0.5879775",
"0.58677495",
"0.58617145",
"0.58612895",
"0.58587354",
"0.5855151",
"0.5810393",
"0.57970697",
"0.5792505",
"0.57917124",
"0.57795614",
"0.5757537",
"0.57573986",
"0.5751884",
"0.57422465",
"0.57345676",
"0.57186663",
"0.57100666",
"0.5703413",
"0.56800914",
"0.5677629",
"0.5676544",
"0.5672289",
"0.56708694",
"0.56596214",
"0.5650789",
"0.5644796",
"0.5642121",
"0.5642121",
"0.5641267",
"0.5639801",
"0.56337625",
"0.56323284",
"0.5625974",
"0.5624464",
"0.5620975",
"0.5617249",
"0.5611813",
"0.56018144",
"0.55912185",
"0.5578688",
"0.5577432",
"0.5572573",
"0.5571989",
"0.5571044",
"0.5566431",
"0.5565062",
"0.55531293",
"0.5547848",
"0.5547464",
"0.55474424",
"0.5546767",
"0.5537116",
"0.55338925",
"0.5533561",
"0.552838",
"0.5524505",
"0.55213",
"0.5515543",
"0.5513697",
"0.55084395",
"0.5507435",
"0.5507188",
"0.55001754",
"0.5496732",
"0.54938376",
"0.54894674",
"0.5487672",
"0.5483012",
"0.54796916",
"0.54783386",
"0.54739445",
"0.547113",
"0.5470283",
"0.5458382",
"0.5457115",
"0.54499304",
"0.5445319"
] | 0.64995736 | 2 |
downloads csv of all grant data to users computer | def get_grant_data
if GrantsData.exists?
@grant_data = GrantsData.all_csv
respond_to do |format|
format.html
format.csv do
send_data @grant_data, filename: "all_grants_data_#{Time.now.to_s(:db)}.csv"
end
end
else
flash[:notice] = 'Grant Data Table Empty'
redirect_to :controller => 'amrc_reports', :action => 'reports'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def get_unsubmitted\n @grant_data = User.to_csv_unsubmitted\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"unsubmitted_charity_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def show\n\t\t#@data = CSV.generate do |csv|\n\t\t#\t(1..2000).each_with_index do |user, index|\n\t\t#\t\tcsv << [\"MA_user\" + \"#{index+1}\" + \"@cb.com\", \"12345678\"]\n\t\t#\tend\n\t\t#end\n\t\t#send_data(@data, :type => 'text/csv; charset=utf-8; header=present', :filename => \"MA_user_2000.csv\")\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def get_individual_grant\n individual = User.find(params[:charity_id])\n if GrantsData.exists?(userID: params[:charity_id])\n csv_data = GrantsData.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_grants_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Grant Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'grants_individual'\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def personnel_data\n users = User.all.compact.uniq\n data = generate_csv(users)\n render :template => \"shared/csv_data\", :locals => {:data => data}, :layout => false\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def to_csv_dash_data\n @grant_data = dash_data_create\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_dashboard_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def export\n @transactions = Transaction.find_all_by_user_id(current_user.id)\n csv = \"\"\n i = 0\n @transactions.each do |trans|\n if (i==0)\n csv += trans.to_csv(true)\n else\n csv += trans.to_csv(false)\n end\n i += 1\n end\n\n respond_to do |format|\n format.csv { send_data csv }\n end\n end",
"def index\n @mobiles = Customer.pluck(:mobile)\n @agents = User.where(\"role = ?\", 2).pluck(:fname, :id)\n @franchises = User.where(\"role = ?\", 1).pluck(:fname, :id)\n @customers_csv = (policy_scope Customer).order(\"id desc\")\n @customers = (policy_scope Customer).order(\"id desc\").page params[:page]\n authorize Customer.new, :index?\n \n respond_to do |format|\n format.html\n format.csv { send_data @customers_csv.as_csv }\n end\n\n\n end",
"def download_response_files!\n files_downloaded = []\n File.makedirs(cache_location + '/returns')\n with_ftp do |ftp|\n files = ftp.list('*.csv')\n files.each do |filels|\n size, file = filels.split(/ +/)[4], filels.split(/ +/)[8..-1].join(' ')\n ftp.get(file, cache_location + '/returns/' + user_suffix + '_' + file)\n files_downloaded << file\n end\n end\n files_downloaded\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def export_users(file)\n users = User.all\n\n csv_string = FasterCSV.generate do |csv|\n users.each do |user|\n csv << [user.email, user.first_name, user.last_name]\n end\n end\n\n send_data csv_string,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :disposition => \"attachment; filename=users.csv\"\n File.open(file,\"w\").write\n end",
"def export_students_to_csv\n CSV.generate(headers: true) do |csv|\n csv << %w[perm email first_name last_name github_username]\n\n roster_students.each do |user|\n csv << [\n user.perm,\n user.email,\n user.first_name,\n user.last_name,\n user.username\n ]\n end\n end\n end",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def index\n\t\t@users= User.all\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.csv { send_data Importer.generate(:csv, @users), filename: \"users-#{Date.today}.csv\" }\n\t\tend\n\n\tend",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def admin_index\n authorize User\n\n respond_to do |format|\n format.html do\n @clicked_through = params[:click_through].present?\n @filter_admin = false\n\n @users = if current_user.can_super_admin?\n User.includes(:department, :org, :perms, :roles, :identifiers).page(1)\n else\n current_user.org.users\n .includes(:department, :org, :perms, :roles, :identifiers)\n .page(1)\n end\n end\n\n format.csv do\n send_data User.to_csv(current_user.org.users.order(:surname)),\n filename: \"users-accounts-#{Date.today}.csv\"\n end\n end\n end",
"def hv_export_users(oHarvest, iDbg = 0)\n\n\tusers = oHarvest.users.all\n\n summary = Array.new()\n\n\tusers.each do |u|\n if (u.is_active == true && u.is_admin == false)\n p_user = Array.new(5)\n p_user[0] = u.first_name\n p_user[1] = u.id\n p_user[2] = u.email\n p_user[3] = u.department\n p_user[4] = u.last_name\n \n summary.push(p_user)\n end\n end\n\n summary = summary.sort { |x, y|\n x[0] <=> y[0]\n }\n\n file = get_config(\"COMMON\",\t\"CSVPath\") + get_config(\"Harvest\", \"MUsers\")\n flush_to_csv(summary, file, true)\n\nend",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def index\n @companies = Company.all\n @top_billing = Company.ordered_by_current_bill\n\n @import = Company::Import.new\n # authorize @companies\n skip_authorization\n respond_to do |format|\n format.html\n format.csv { send_data @companies.to_csv, filename: \"companies-export-#{Time.now}-inclustaff.csv\" }\n \tend\n\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def download_sample\n if can?(:>, \"4\")\n send_file Rails.public_path+\"/excel/Import/sample_file_part_upload.csv\", :disposition => \"attachment\"\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def export\n @fans = Fan.active.find(:all)\n\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Name\", \"E-mail\", \"Notes\", \"Section 1 - Description\", \"Section 1 - Email\", \"Section 1 - IM\", \"Section 1 - Phone\", \"Section 1 - Mobile\", \"Section 1 - Pager\", \"Section 1 - Fax\", \"Section 1 - Company\", \"Section 1 - Title\", \"Section 1 - Other\", \"Section 1 - Address\"]\n\n for fan in @fans\n csv << [fan.name,\n fan.email,\n \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\",\n fan.address \n ]\n end\n end\n\n # filename = @list.name.downcase.gsub(/[^0-9a-z]/, \"_\") + \".csv\"\n filename = \"fans.csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def get_rex\n if Project.exists?\n @grant_data = Project.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"research_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:success] = \"Research Expenditure Table is Empty\"\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def download_all\n if platinum_user_and_above?\n @domains=Domain.where(\"name is not null\")\n template = Rails.root.join(\"app\",\"views\",\"reports\", \"DomainPortfolio-template.xlsx\")\n workbook = RubyXL::Parser.parse(template)\n worksheet = workbook.worksheets[0]\n worksheet.sheet_name = 'All'\n index = 0\n @domains.each do |domain|\n next if domain.name.nil?\n next if domain.name.empty?\n index += 1\n if domain.transferable\n my_row = [domain.name, \"yes\"]\n else\n my_row = [domain.name, \"no\"]\n end\n worksheet_write_row(worksheet,index, my_row)\n end\n file = \"DomainPortfolio-All-\" + Time.now.strftime('%m%d%Y') + \".xlsx\"\n send_data workbook.stream.string, filename: file, disposition: 'attachment'\n else\n redirect_back :fallback_location => root_path, :alert => \"Access denied.\"\n end\n end",
"def download_prices() \n\t\t \t \t \t\n\t\t\t\t@dropbox_token = DropboxSession.deserialize(Linkdropbox.first.dropbox_token)\n\t\t\t\tclient = DropboxClient.new(@dropbox_token)\n\t\t\t\tcontents, metadata = client.get_file_and_metadata('Grupo8/DBPrecios.accdb')\n\t\t\t\tbegin\n\t\t\t\t open('public/jars/DBPrecios.accdb', 'wb') {|f| f.puts contents }\n\t\t\t\t \n\t\t\t\t \n\t\t\t\trescue\n\t\t\t\t flash[:success] = \"Exception occured while downloading...\"\t\t\n\n\t \t\tend \n\n\t \t\tLinkdropbox.import_prices_to_csv\t\n\tend",
"def export\n send_data current_user.records.select(:json).order('updated_at desc').collect(&:json).to_json, filename: 'records.json'\n end",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def grader_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n\n students = Student.left_outer_joins(:user, grade_entry_students: [tas: :user])\n .where('grade_entry_students.assessment_id': grade_entry_form.id)\n .order('users.user_name', 'users_roles.user_name')\n .pluck('users.user_name', 'users_roles.user_name')\n .group_by { |x| x[0] }\n .to_a\n\n file_out = MarkusCsv.generate(students) do |student, graders|\n [student] + graders.pluck(1)\n end\n\n send_data file_out,\n type: 'text/csv',\n disposition: 'attachment',\n filename: \"#{grade_entry_form.short_identifier}_grader_mapping.csv\"\n end",
"def get_harvest_users(harvest_webapp, dbg = 0)\n\n member_page = \"/team\"\n pt = get_config(\"COMMON\", \"CSVPath\")\n um = get_config(\"Harvest\", \"MUsers\")\n\n site = harvest_webapp\n\n begin\n \n # move to User list\n site.Go(member_page)\n \n # save user list to csv\n data = site.RetrieveList(\"li.manage-list-item\", method(:proc_split_list_to_array))\n\n p data if dbg\n \n # append email and dept_code\n data.each do |member|\n if (member[0] != \"Admin\")\n site.Go(\"/people/\" + member[1] + \"/edit#profile_base\")\n member.push(site.GetItem(\"#user_email\").attribute(\"value\").value.strip);\n member.push(site.GetItem(\"#user_department\").attribute(\"value\").value.strip);\n else\n member.push(\"\");\n member.push(\"\");\n end\n end\n \n # sort by dept_code\n sorted = data.sort { |a, b|\n a[3] <=> b[3]\n }\n \n # flush to file\n flush_to_csv(sorted, pt + um)\n \n p (pt + um) if dbg\n \n rescue => e\n\n p e\n p e.backtrace\n \n end\n \nend",
"def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end",
"def export_data(tables, min_id=false, max_id=false)\n pause_replication if @master && ! @repl_paused\n import_export_user = 'jetpants'\n create_user(import_export_user)\n grant_privileges(import_export_user) # standard privs\n grant_privileges(import_export_user, '*', 'FILE') # FILE global privs\n reconnect(user: import_export_user)\n @counts ||= {}\n tables.each {|t| @counts[t.name] = export_table_data t, min_id, max_id}\n ensure\n reconnect(user: app_credentials[:user])\n drop_user import_export_user\n end",
"def export_csv\n\n group_array = []\n @page = 1\n @per_page = 50\n\n groups = @context.get(:groups, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n group_array << groups\n group_array, group_hash = check_paging(groups, group_array, \"groups\", @context, true)\n\n group_array.each_with_index do |group, index|\n is_new = index == 0 ? true : false\n membership_array = []\n @page = 1\n\n group_model = Group.find(group['id'], :params => { :access_token => ENV[\"API_TOKEN\"] })\n memberships = group_model.get(:memberships, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n membership_array << memberships\n membership_array, @membership_hash = check_paging(memberships, membership_array, \"memberships\", group_model, is_new)\n end\n\n export_data = [group_array, @membership_hash]\n perform_export(export_data) \n\n respond_to do |format|\n format.html { render :inline => \"<a href=<%= @download_url %>>Download CSV</a>\" }\n format.json { render :json => @download_url.to_json }\n end\n end",
"def index\n\t\t@leaders=Leader.all.order(\"first_name ASC, last_name ASC\")\n\n\t respond_to do |format|\n\t format.html\n\t format.csv do\n\t filename = \"mpate-\" + params[:controller] + \"-\" + Time.now.strftime(\"%m-%e-%Y\")\n\t headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n\t headers['Content-Type'] ||= 'text/csv'\n\t end\n\t end\n\tend",
"def download\n @posts = PostService.getAllPosts\n respond_to do |format|\n format.html\n format.csv { send_data @posts.to_csv, :filename => \"Post List.csv\" }\n end\n end",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_to_csv(users)\n CSV.open(\"./user.csv\", \"wb\") do |csv|\n csv << Universe.attribute_names\n users.each do |user|\n csv << user.attributes.values\n end\n end\n end",
"def main()\n request_url = \"#{$canvas_url}/api/v1/users/#{$canvas_user_id}/page_views?per_page=100&start_time=#{$start_time}&end_time=#{$end_time}\"\n method = \"get\"\n options = {}\n data = canvasApiRequest(method,request_url,options)\n compiledHash = []\n data.each do |hash|\n hashData = flattenHash(hash)\n compiledHash.push(hashData)\n end\n outputToCSV(compiledHash)\nend",
"def export_data(tables, min_id=false, max_id=false, infinity=false)\n pause_replication if @master && ! @repl_paused\n import_export_user = 'jetpants'\n create_user(import_export_user)\n grant_privileges(import_export_user) # standard privs\n grant_privileges(import_export_user, '*', 'FILE') # FILE global privs\n reconnect(user: import_export_user)\n @counts ||= {}\n tables.each {|t| @counts[t.name] = export_table_data t, min_id, max_id, infinity}\n ensure\n reconnect(user: app_credentials[:user])\n drop_user import_export_user\n end",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def download\r\n\r\n @restricted_fields = []\r\n @no_menubar\t= true\r\n @no_links\t= true\r\n @no_filterbar\t= true\r\n @no_row_links\t= true\r\n pagin_opts\t\t = {:include => [:customers,:user_accesses,:profiles,:dev_feedbacks,:file_imports]}\r\n pagin_opts[:order]\t = @default_order if @default_order\r\n pagin_opts[:order]\t||= \"users.#{params[:sort_by]}\" if !params[:sort_by].blank?\r\n pagin_opts[:conditions] ||= @default_filter\r\n pagin_opts[:joins]\t||= @joins_fields || []\r\n\r\n # pagin_opts[:select]\t||= \"`users`.`login`,`users`.`first_name`,`users`.`last_name`,`users`.`email`,`users`.`telephone`,`users`.`language`,`users`.`active`,`users`.`salt`\" unless params[:format] == 'html'\r\n\r\n if params[:id] && params[:id].to_i > 0\r\n @user = User.find_by_id(params[:id], pagin_opts)\r\n if !@user\r\n flash[:warning] = _(\"Error: %{obj} not found!\") % {:obj => _(%q[User])}\r\n begin\r\n redirect_to :back\r\n rescue\r\n redirect_to :action => :list\r\n end\r\n return\r\n end\r\n f_name = @user.disp_name\r\n respond_to do |format|\r\n format.html {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/show.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"#{f_name}.html\", :disposition => 'attachment', :type => 'text/html;charset=utf-8') }\r\n format.doc {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/show.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"#{f_name}.doc\", :disposition => 'attachment', :type => 'application/msword;charset=utf-8') }\r\n format.pdf {\r\n @pdf = true\r\n @debug_pdf = params[:debug_pdf]\r\n params[:format] = 'html'\r\n html = render_to_string(:template => '/users/show.html.erb', :format => :html, :id => @user.id, :layout => 'pdf')\r\n html.gsub!(/\\/images\\//, Rails.root.join('public', 'images/')) if !params[:debug_pdf]\r\n render(:text => html, :layout => 'pdf') and return if params[:debug_pdf]\r\n kit = PDFKit.new(html, :encoding => 'UTF-8')\r\n kit.stylesheets << Rails.root.join('public', 'stylesheets', 'pdf.css')\r\n pdf = kit.to_pdf\r\n send_data(pdf, :filename => \"#{@user.disp_name}.pdf\") unless params[:debug_pdf] || pdf.blank?\r\n\r\n # send_data(render_to_string(:format => :html, :layout => false, :action => :show, :id => @user.id), :filename => \"#{f_name}.pdf\", :disposition => 'attachment', :type => 'application/pdf;charset=utf-8')\r\n\r\n return\r\n }\r\n format.xml {\r\n send_data(@user.to_xml, :filename => \"#{f_name}.xml\", :disposition => 'attachment', :type => 'text/xml;charset=utf-8')}\r\n format.json {\r\n send_data(@user.to_json, :filename => \"#{f_name}.json\", :disposition => 'attachment', :type => 'text/json;charset=utf-8')}\r\n format.xls {\r\n book = Spreadsheet::Workbook.new\r\n sheet = book.create_worksheet(:name => \"users\")\r\n sheet.row(0).concat([\"Login\", \"First Name\", \"Last Name\", \"Email\", \"Telephone\", \"Language\", \"Active\", \"Salt\", \"User Accesses\", \"Profiles\"])\r\n sheet.row(1).replace([@user.login, @user.first_name, @user.last_name, @user.email, @user.telephone, @user.language, @user.active, @user.salt, @user.user_accesses.map(&:disp_name).to_sentence, @user.profiles.map(&:disp_name).to_sentence])\r\n fname = \"users.xls\"\r\n tmp = Tempfile.new(fname)\r\n book.write(tmp.path)\r\n tmp.close\r\n send_file(tmp.path, 
:filename => fname)\r\n }\r\n format.csv { row = @user\r\n @csv_string = FasterCSV.generate({:encoding => 'UTF-8', :col_sep => (SystemSetting['csv_export_separator'] || ';')}) do |csv|\r\n cols = []\r\n User::FIELDS.each{|k,v| cols << k if [:string, :text, :integer, :float, :date, :time, :datetime, :timestamp, :ho_assoc, :bt_assoc].include? v}\r\n cols.reject!{|c| [:customers, :accesses, :dev_feedbacks, :file_imports].include?(c) }\r\n csv << cols.map{|c|\r\n if [:ho_assoc, :bt_assoc].include?(User::FIELDS[c])\r\n v = row.send(c) ; v ? v.disp_name : nil\r\n else\r\n row.send(c)\r\n end\r\n }\r\n end\r\n @export_encoding ||= SystemSetting['csv_export_encoding'] || 'UTF-16LE'\r\n conv = Iconv.new(@export_encoding, 'UTF-8')\r\n send_data(conv.iconv(@csv_string), :filename => \"#{f_name}.csv\", :disposition => 'attachment', :type => \"text/csv;charset=#{@export_encoding.downcase}\")\r\n return\r\n }\r\n end\r\n else\r\n pagin_opts[:page] = 1\r\n pagin_opts[:per_page] = User.count+1\r\n @users = User.paginate(pagin_opts)\r\n respond_to do |format|\r\n format.html {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/list.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"users.html\", :disposition => 'attachment', :type => 'text/html;charset=utf-8') }\r\n format.doc {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/list.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"users.doc\", :disposition => 'attachment', :type => 'application/msword;charset=utf-8') }\r\n format.pdf {\r\n @pdf = true\r\n @debug_pdf = params[:debug_pdf]\r\n params[:format] = 'html'\r\n html = render_to_string(:template => '/users/list.html.erb', :layout => 'pdf')\r\n html.gsub!(/\\/images\\//, Rails.root.join('public', 'images/')) if !params[:debug_pdf]\r\n render(:text => html, :layout => 'pdf') and return if params[:debug_pdf]\r\n kit = PDFKit.new(html, :encoding => 'UTF-8')\r\n kit.stylesheets << Rails.root.join('public', 'stylesheets', 'pdf.css')\r\n pdf = kit.to_pdf\r\n send_data(pdf, :filename => \"users.pdf\") unless params[:debug_pdf] || pdf.blank?\r\n\r\n # send_data(render_to_string(:layout => false, :action => :list), :filename => \"users.pdf\", :disposition => 'attachment', :type => 'application/pdf;charset=utf-8')\r\n\r\n }\r\n format.xml {\r\n send_data(@users.to_xml, :filename => ('users.xml'), :disposition => 'attachment', :type => 'text/xml;charset=utf-8')}\r\n format.xls {\r\n book = Spreadsheet::Workbook.new\r\n sheet = book.create_worksheet(:name => \"users\")\r\n sheet.row(0).concat([\"Login\", \"First Name\", \"Last Name\", \"Email\", \"Telephone\", \"Language\", \"Active\", \"Salt\", \"User Accesses\", \"Profiles\"])\r\n @users.each_with_index do |row,i|\r\n sheet.row(i+1).replace([row.login, row.first_name, row.last_name, row.email, row.telephone, row.language, row.active, row.salt, row.user_accesses.map(&:disp_name).to_sentence, row.profiles.map(&:disp_name).to_sentence])\r\n end\r\n fname = \"users.xls\"\r\n tmp = Tempfile.new(fname)\r\n book.write(tmp.path)\r\n tmp.close\r\n send_file(tmp.path, :filename => fname)\r\n }\r\n format.csv {\r\n @csv_string = FasterCSV.generate({:encoding => 'UTF-8', :col_sep => (SystemSetting['csv_export_separator'] || ';')}) do |csv|\r\n cols = []\r\n User::FIELDS.each{|k,v| cols << k if 
[:string, :text, :integer, :float, :date, :time, :datetime, :timestamp, :ho_assoc, :bt_assoc].include? v}\r\n cols.reject!{|c| [:customers, :accesses, :dev_feedbacks, :file_imports].include?(c) }\r\n csv << cols.map{|c| _(c.titleize)}\r\n @users.map{|row|\r\n csv << cols.map {|c|\r\n if [:ho_assoc, :bt_assoc].include?(User::FIELDS[c])\r\n v = row.send(c) ; v ? v.disp_name : nil\r\n else\r\n row.send(c)\r\n end\r\n }\r\n }\r\n end\r\n @export_encoding ||= SystemSetting['csv_export_encoding'] || 'UTF-16LE'\r\n conv = Iconv.new(@export_encoding, 'UTF-8')\r\n send_data(conv.iconv(@csv_string), :filename => \"users.csv\", :disposition => 'attachment', :type => \"text/csv;charset=#{@export_encoding.downcase}\")\r\n }\r\n end\r\n end\r\n end",
"def download_table\n params = download_table_params\n table_name = params[:table_name].parameterize.underscore\n action_name = table_name + \"_table\"\n table_url = self.send(\"#{table_name}_table_admin_reports_path\") + \".csv\"\n filters = params.except(:table_name).to_h\n \n redirect_to({\n controller: 'reports', \n action: action_name, \n format: :csv\n }.merge(filters))\n end",
"def export_ansokan_csv\n download_csv(AdminOnly::Reports::ShfApplicationsCsvReport.new, t('.success'), t('.error'))\n end",
"def export_histories\n @profiles = User.all if current_user.is? :admin\n\n if @profiles\n path = 'export/'\n name = 'alle_profile_historien.csv'\n File.new(path + name, \"w\").path\n input = \"\"\n\n @profiles.each do |profi|\n \n File.open(path+name, \"w\") do |histories|\n \n input << \"Name, Vorname, Kunden-Nr.,\\n\"\n input << \"#{profi.last_name}, #{profi.first_name}, #{profi.user_number}\\n\"\n\n packages = profi.packages\n orders = profi.orders\n\n unless packages.blank?\n input << \"Pakete-Historie,\" + \"\\n\"\n input << \"Paket-Nr., Erstellt am, Anzahl Kleider, Geschlecht, Beschreibung, Labels,\" + \"\\n\"\n packages.each do |package|\n input << \"#{package.serial_number},#{formatted_date(package.created_at)},#{package.amount_clothes},#{package.sex == true ? \"Mädchen\" : \"Junge\"},\"\n input << \"#{package.notice.gsub(\",\", \" \")},\" unless package.notice.nil?\n input << \"#{package.label.gsub(\",\", \" \").gsub(\"--\", \" \")},\" unless package.label.nil?\n input << \"\\n\"\n end\n end\n \n unless orders.blank?\n input << \"\\n\"+ \"Bestell-Historie,\" + \"\\n\"\n input << \"Bestell-Nr., Bestellt am, Bewerted am , Bewertung, Angekommen?,\" + \"\\n\"\n orders.each do |order|\n input << \"#{order.order_number},#{formatted_date(order.created_at)}, #{formatted_date(order.eva_date_created_at)},\"\n input << \"#{I18n.t(order.evaluation.to_sym)},\" if order.evaluation\n input << \"#{order.received == true ? \"Nein\" : \"Ja\"}\" + \"\\n\"\n end\n\n input << \"\\n\"\n end\n puts input\n histories.write(input) unless input.blank?\n end\n end\n send_file(path+name) \n end\n\n end",
"def export_archive\n if flash[:archive]\n if flash[:archive] == \"true\"\n result = Urlmaster.where(:archive => \"archive\").order(:archiveTime => :desc)\n nameArc = \"Archive\"\n elsif flash[:archive] == \"false\"\n result = Urlmaster.where(:archive => nil)\n nameArc = \"NotArchive\"\n end \n end\n # `result` variable contains list needed to export\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-\" + nameArc + \"-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n end",
"def get_csv\n CSV.generate do |csv|\n csv << ['Name','Student ID','User ID','Role','Email Address','Sections']\n get_feed[:students].each do |student|\n name = student[:last_name] + ', ' + student[:first_name]\n user_id = student[:login_id]\n student_id = student[:student_id]\n email_address = student[:email]\n role = ENROLL_STATUS_TO_CSV_ROLE[student[:enroll_status]]\n sections = sections_to_name_string(student[:sections])\n csv << [name, student_id, user_id, role, email_address, sections]\n end\n end\n end",
"def index\n @mailing_list = MailingList.find(params[:mailing_list_id]) \n \n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @customers }\n format.csv { \n csv_file = Customer.to_csv(current_user.id)\n send_data csv_file, :type => 'text/csv', :disposition => 'attachment;\nfilename=output.csv'\n\n \n\n }\n end\n end",
"def index\n if current_user.admin?\n @app_schedules = AppSchedule.all\n\n respond_to do |format|\n format.html\n format.csv { send_data @app_schedules.to_csv, filename: \"appsched-#{Date.today}.csv\"}\n end\n else\n @app_schedules = current_user.app_schedules.where(user_id: current_user)\n respond_to do |format|\n format.html\n format.csv { send_data @app_schedules.to_csv, filename: \"appsched-#{Date.today}.csv\"}\n end\n end\n end",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def index\n authorize! :index, Walkathon::Pledge.new\n @walkathon_pledges = Walkathon::Pledge.includes(:student)\n if params[:student_id]\n @walkathon_pledges = @walkathon_pledges.where(student_id: params[:student_id])\n @student = Student.find(params[:student_id])\n end\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"pledges.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n end",
"def index\n\t\t@managers=Manager.all\n\n respond_to do |format|\n format.html\n format.csv do\n filename = \"mpate-\" + params[:controller] + \"-\" + Time.now.strftime(\"%m-%e-%Y\")\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n\tend",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def show\n respond_to do |format|\n format.html\n format.csv { send_data @cycle.to_csv, filename: \"users-#{Date.today}.csv\" }\n end\n end",
"def download\n if platinum_user_and_above?\n urls=SiteUrl.where(\"site is not null\")\n workbook = RubyXL::Workbook.new\n worksheet = workbook.worksheets[0]\n worksheet.sheet_name = 'urls'\n header = [\"Site\",\"Url\",\"Last Update\"]\n index = 0\n worksheet_write_row(worksheet,index,header)\n urls.each do |url|\n next if url.site.nil?\n next if url.site.empty?\n index += 1\n my_row = [url.site, url.url, url.updated_at]\n worksheet_write_row(worksheet,index, my_row)\n end\n file = \"Discovered_Urls_\" + Time.now.strftime('%m%d%Y') + \".xlsx\"\n send_data workbook.stream.string, filename: file, disposition: 'attachment'\n else\n redirect_back :fallback_location => root_path, :alert => \"Access denied.\"\n end\n end",
"def generate\n @student_users = StudentUser.all\n respond_to do |format|\n format.html\n format.csv { send_data @student_users.to_csv }\n format.xls { send_data @student_users.to_csv(col_sep: \"\\t\") }\n end\n end",
"def csv_lead_owner_export\n CSV.generate do |csv|\n header = []\n header << 'Lead_owner'\n header << 'Applicant type'\n header << 'University Name'\n header << 'Braven Region'\n\n csv << header\n\n LeadOwnerMapping.all.each do |m|\n exportable = []\n exportable << m.lead_owner\n exportable << m.applicant_type\n exportable << m.university_name\n exportable << m.bz_region\n\n csv << exportable\n end\n end\n end",
"def csv_data\n sql = Utils.setup_basic_filters(SqlAssembler.new, @params.merge('site_id' => @site[:id]))\n sql.set_select 'SELECT package_name, success, title, digitool_id, islandora_pid, content_model, time_started, time_finished, bytes_ingested FROM islandora_packages'\n sql.set_order 'ORDER BY id DESC'\n return sql.execute\n end",
"def download_csv_file_most_seached_merchants\n if (params[:start_date]&¶ms[:end_date]).blank?\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.all.order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at\n writer << csv_value.map(&:inspect).join(', ')\n writer << \"\\n\"\n end\n end\n send_file(file_name)\n else\n start_date = Date.strptime(params[:start_date], \"%m/%d/%Y\")\n end_date = Date.strptime(params[:end_date], \"%m/%d/%Y\")\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant#{start_date}_to_#{end_date}.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.where(\"DATE(created_at) >= ? AND DATE(created_at) <= ?\", start_date, end_date).order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at, merchant.updated_at\n writer << csv_value\n writer << \"\\n\"\n end\n end\n \n send_file(file_name)\n end\n end",
"def getCSV(restid)\n # get all ordered items of users who ordered in restaurant \"restid\"\n userItems=OrderMng.getAllUsersItemsByRestID(restid)\n csv=[]\n for userItem in userItems\n tmp=Array.new\n for item in userItem\n tmp.push(item)\n end\n csv.push(tmp)\n end\n puts(\"B4 P\")\n p csv\n p userItems\n return userItems\nend",
"def index\n @accounts = Account.all\n respond_to do |format|\n format.html \n format.json\n format.csv { send_data @accounts.to_csv, filename: \"accounts-#{Date.today}.csv\" }\n end\n end",
"def export\n @donor = Donor.order(:created_at)\n respond_to do |format|\n format.html\n format.csv { send_data @donor.as_csv, filename: \"Donors Export #{Date.today}.csv\" }\n end\n end",
"def table_1\n @table1 = read_table(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"))\n #send_file(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"), :type => 'text/csv', :disposition => 'inline')\n end",
"def index\n @backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @backend_tutorial_stats }\n format.csv do\n backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n data = Backend::TutorialStat.download_tutorial_stats_csv(backend_tutorial_stats) \n filename = \"tutorial_stats#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\" \n send_data(data, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename) \n end\n end \n end",
"def index\n tasks = Task.where(user_id: session[:user_id])\n @tasks = tasks.sort_by(&:created_at).reverse\n\n #binding.pry\n\n respond_to do |format|\n format.html\n format.csv { send_data as_csv(@tasks), filename: \"tasks-#{Date.today}.csv\" }\n end\n end",
"def index\n @pagetitle = \"Suppliers\"\n \n @companies = Company.all\n\n @path = 'suppliers'\n\n @suppliercsv = Supplier.all \n respond_to do |format|\n format.html\n format.csv { send_data @suppliercsv.to_csv }\n \n end\n\n\n end",
"def index\n @search = Claim.order('claims.created_at desc').search(params[:q])\n @claims = @search.result(:distinct => true).paginate(:per_page => 50, :page => params[:page])\n respond_to do |format|\n format.html{}\n format.csv {\n send_data generate_csv, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=claims_list.csv\" \n }\n end\n end",
"def export_accounts\n # TODO: THERE'S DUPLICATION FROM InvoicesController in CSV setup.\n\n # These charsets are expected to be common in our users.\n charset = (request_from_a_mac? ? \"MacRoman\" : \"ISO-8859-1\")\n norm = lambda {|o| Iconv.conv(\"#{charset}//IGNORE\", \"UTF-8\", o.to_s)}\n col_sep = (request_from_windows? ? \",\" : ';') # Excel understands this one automatically\n row_sep = (request_from_windows? ? \"\\r\\n\" : \"\\n\") # in case people treat it as a text file\n\n csv_string = FasterCSV.generate(:col_sep => col_sep, :row_sep => row_sep) do |csv|\n csv << %w(Nombre Antiguedad Login Facturas Clientes).map {|h| norm.call(h)}\n # this iterator is provided by pseudo_cursors\n Account.find_each(:include => :owner) do |account|\n csv << [\n account.name,\n format_date(account.created_at.to_date),\n (format_date(account.owner.last_seen_at.to_date) rescue ''),\n account.invoices.count,\n account.customers.count\n ].map {|h| norm.call(h)}\n end\n end\n send_data(csv_string, :type => \"text/csv; charset=#{charset}\", :filename => \"accounts_#{Time.now.strftime(\"%Y%m%d\")}.csv\")\n end",
"def download_data\n file_content = nil\n begin\n file_content = FileContent.find(params[:id])\n rescue\n file_content = nil\n end\n\n # We need to figure out which groups are allowed to download this file content.\n # Unfortunately, this requires iterating through any referenced URLs and collecting\n # all applicable group_ids.\n group_ids = []\n if (!file_content.nil? &&\n !file_content.data.nil?)\n file_content.process_files.each do |process_file|\n if (!process_file.os_process.nil? &&\n !process_file.os_process.fingerprint.nil? &&\n !process_file.os_process.fingerprint.url.nil?)\n # Clear the cache, if need be.\n process_file.os_process.fingerprint.url.expire_caches\n group_ids << process_file.os_process.fingerprint.url.group_id\n end\n end\n group_ids.uniq!\n end\n\n if (!file_content.nil? &&\n !file_content.data.nil? &&\n (!group_ids.index(nil).nil? ||\n current_user.has_role?(:admin) ||\n ((current_user.groups.map{|g| g.is_a?(Group) ? g.id : g} & group_ids).size > 0)))\n send_file(RAILS_ROOT + '/' + file_content.data.to_s, :x_sendfile => true)\n else\n redirect_back_or_default('/')\n end\n end",
"def index\n @trainers = Trainer.paginate(:page => params[:page]).order(email_dirigeant: :desc, crawled_for_email: :desc)\n respond_to do |format|\n format.html\n format.csv { send_data Trainer.all.to_csv}\n end\n end",
"def store_creddentials\n CSV.open(ENV['HOME'] + '/creddentials.csv', 'w') do |csv|\n csv << [@email, @password]\n end\n read_creddentials\n end",
"def show\n respond_to do |format|\n format.html\n format.json\n format.csv {\n send_data(to_csv(@user), filename: @user.name + '_history.csv')\n }\n end\n end",
"def download\n create_agent\n login\n fetch_feed\n create_catalog\n download_catalog\n end",
"def export_data\r\n folder = \"\\data\"\r\n FileUtils.mkdir_p folder\r\n CSV.open(File.join(folder, @output_stats_name), 'wb') do |csv|\r\n csv << @global_bests\r\n csv << @average_global_bests\r\n @neighbourhoods_list[0].report_particles.each do |x|\r\n csv << x\r\n end\r\n end\r\n end",
"def index\n @lists = current_user.lists\n respond_to do |format|\n format.html\n # Configuration for the pdf and the csv\n format.pdf { render template: 'lists/pdf', pdf: 'pdf'}\n format.csv { send_data @lists.to_csv, filename: \"csv-#{Date.today}.csv\" }\n end\n end",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end"
] | [
"0.7116747",
"0.692551",
"0.674647",
"0.6619299",
"0.65484095",
"0.6452047",
"0.63973594",
"0.63890463",
"0.6355495",
"0.6330534",
"0.63282967",
"0.63040084",
"0.62965393",
"0.6282694",
"0.62816906",
"0.6239026",
"0.6235171",
"0.6225189",
"0.6220521",
"0.6188535",
"0.6173388",
"0.6135442",
"0.61311567",
"0.6107037",
"0.6076775",
"0.6063285",
"0.60449207",
"0.6017837",
"0.6013902",
"0.60047",
"0.5985064",
"0.5963614",
"0.59357655",
"0.5931976",
"0.5927979",
"0.5923306",
"0.59211427",
"0.5907001",
"0.59045804",
"0.58964944",
"0.58819705",
"0.5868799",
"0.5855946",
"0.58409315",
"0.58373445",
"0.58225226",
"0.58214414",
"0.5813609",
"0.57904255",
"0.5788943",
"0.5785948",
"0.5785874",
"0.5783116",
"0.57806885",
"0.5776182",
"0.57726336",
"0.57619894",
"0.5741192",
"0.5737814",
"0.57324743",
"0.57261974",
"0.57252043",
"0.5724452",
"0.57210755",
"0.5717216",
"0.57137215",
"0.5691831",
"0.5686926",
"0.5685922",
"0.56746286",
"0.5626218",
"0.56191856",
"0.5618295",
"0.5611626",
"0.5610402",
"0.56044894",
"0.5600655",
"0.5597553",
"0.559571",
"0.5590476",
"0.5587097",
"0.55869347",
"0.55647844",
"0.55614996",
"0.55581486",
"0.55533195",
"0.5549602",
"0.55489874",
"0.55477375",
"0.5532147",
"0.55267507",
"0.5519332",
"0.55111074",
"0.5507795",
"0.5503428",
"0.5493037",
"0.54905623",
"0.5484344",
"0.5484034",
"0.54810876"
] | 0.70627373 | 1 |
downloads csv of all dashboard data to user's computer | def to_csv_dash_data
@grant_data = dash_data_create
respond_to do |format|
format.html
format.csv do
send_data @grant_data, filename: "all_dashboard_data#{Time.now.to_s(:db)}.csv"
end
# format.CSV {render csv: @grant_data.to_csv}
# format.xls {render text: @grant_data.to_csv(col_sep: "\t")}
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def export_ansokan_csv\n download_csv(AdminOnly::Reports::ShfApplicationsCsvReport.new, t('.success'), t('.error'))\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def export_data\r\n folder = \"\\data\"\r\n FileUtils.mkdir_p folder\r\n CSV.open(File.join(folder, @output_stats_name), 'wb') do |csv|\r\n csv << @global_bests\r\n csv << @average_global_bests\r\n @neighbourhoods_list[0].report_particles.each do |x|\r\n csv << x\r\n end\r\n end\r\n end",
"def export_archive\n if flash[:archive]\n if flash[:archive] == \"true\"\n result = Urlmaster.where(:archive => \"archive\").order(:archiveTime => :desc)\n nameArc = \"Archive\"\n elsif flash[:archive] == \"false\"\n result = Urlmaster.where(:archive => nil)\n nameArc = \"NotArchive\"\n end \n end\n # `result` variable contains list needed to export\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-\" + nameArc + \"-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def download\n @posts = PostService.getAllPosts\n respond_to do |format|\n format.html\n format.csv { send_data @posts.to_csv, :filename => \"Post List.csv\" }\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def download_table\n params = download_table_params\n table_name = params[:table_name].parameterize.underscore\n action_name = table_name + \"_table\"\n table_url = self.send(\"#{table_name}_table_admin_reports_path\") + \".csv\"\n filters = params.except(:table_name).to_h\n \n redirect_to({\n controller: 'reports', \n action: action_name, \n format: :csv\n }.merge(filters))\n end",
"def show\n\t\t#@data = CSV.generate do |csv|\n\t\t#\t(1..2000).each_with_index do |user, index|\n\t\t#\t\tcsv << [\"MA_user\" + \"#{index+1}\" + \"@cb.com\", \"12345678\"]\n\t\t#\tend\n\t\t#end\n\t\t#send_data(@data, :type => 'text/csv; charset=utf-8; header=present', :filename => \"MA_user_2000.csv\")\n end",
"def index\n @backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @backend_tutorial_stats }\n format.csv do\n backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n data = Backend::TutorialStat.download_tutorial_stats_csv(backend_tutorial_stats) \n filename = \"tutorial_stats#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\" \n send_data(data, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename) \n end\n end \n end",
"def export\n @fans = Fan.active.find(:all)\n\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Name\", \"E-mail\", \"Notes\", \"Section 1 - Description\", \"Section 1 - Email\", \"Section 1 - IM\", \"Section 1 - Phone\", \"Section 1 - Mobile\", \"Section 1 - Pager\", \"Section 1 - Fax\", \"Section 1 - Company\", \"Section 1 - Title\", \"Section 1 - Other\", \"Section 1 - Address\"]\n\n for fan in @fans\n csv << [fan.name,\n fan.email,\n \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\",\n fan.address \n ]\n end\n end\n\n # filename = @list.name.downcase.gsub(/[^0-9a-z]/, \"_\") + \".csv\"\n filename = \"fans.csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def get_grant_data\n if GrantsData.exists?\n @grant_data = GrantsData.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_grants_data_#{Time.now.to_s(:db)}.csv\"\n end\n end\n else\n flash[:notice] = 'Grant Data Table Empty'\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def download_response_files!\n files_downloaded = []\n File.makedirs(cache_location + '/returns')\n with_ftp do |ftp|\n files = ftp.list('*.csv')\n files.each do |filels|\n size, file = filels.split(/ +/)[4], filels.split(/ +/)[8..-1].join(' ')\n ftp.get(file, cache_location + '/returns/' + user_suffix + '_' + file)\n files_downloaded << file\n end\n end\n files_downloaded\n end",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def download_all\n if platinum_user_and_above?\n @domains=Domain.where(\"name is not null\")\n template = Rails.root.join(\"app\",\"views\",\"reports\", \"DomainPortfolio-template.xlsx\")\n workbook = RubyXL::Parser.parse(template)\n worksheet = workbook.worksheets[0]\n worksheet.sheet_name = 'All'\n index = 0\n @domains.each do |domain|\n next if domain.name.nil?\n next if domain.name.empty?\n index += 1\n if domain.transferable\n my_row = [domain.name, \"yes\"]\n else\n my_row = [domain.name, \"no\"]\n end\n worksheet_write_row(worksheet,index, my_row)\n end\n file = \"DomainPortfolio-All-\" + Time.now.strftime('%m%d%Y') + \".xlsx\"\n send_data workbook.stream.string, filename: file, disposition: 'attachment'\n else\n redirect_back :fallback_location => root_path, :alert => \"Access denied.\"\n end\n end",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def save_vix_future_data(year, month, directory, force_download = false)\n force_download = force_download || year > Today.year || (year == Today.year && month >= Today.month) # we want to re-download files for contracts that haven't expired yet\n \n month_code = MonthToMonthCode[month]\n year_suffix = year.to_s[-2..-1]\n file_name = \"CFE_#{month_code}#{year_suffix}_VX.csv\"\n file_path = File.join(directory, file_name)\n \n if File.exists?(file_path) && !force_download\n puts \"File #{file_path} already exists. Skipping.\"\n else\n url = \"http://cfe.cboe.com/Publish/ScheduledTask/MktData/datahouse/#{file_name}\"\n\n puts \"Downloading #{url}\"\n file_contents = open(url).read()\n File.open(file_path, 'w') { |file| file.write(file_contents) }\n end\n \n file_path\nrescue => e\n puts e.message\nend",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def atop_csv\n \"#{@archive_root}/#{master.hostname}/atop_log_#{@gatling_scenario.downcase.gsub('.', '_')}.csv\"\n end",
"def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end",
"def download_prices() \n\t\t \t \t \t\n\t\t\t\t@dropbox_token = DropboxSession.deserialize(Linkdropbox.first.dropbox_token)\n\t\t\t\tclient = DropboxClient.new(@dropbox_token)\n\t\t\t\tcontents, metadata = client.get_file_and_metadata('Grupo8/DBPrecios.accdb')\n\t\t\t\tbegin\n\t\t\t\t open('public/jars/DBPrecios.accdb', 'wb') {|f| f.puts contents }\n\t\t\t\t \n\t\t\t\t \n\t\t\t\trescue\n\t\t\t\t flash[:success] = \"Exception occured while downloading...\"\t\t\n\n\t \t\tend \n\n\t \t\tLinkdropbox.import_prices_to_csv\t\n\tend",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def click_download_as_csv_button\n wait_until_bus_section_load\n download_as_csv_btn.click\n end",
"def render_csv\n report = report_for_rendering\n filename = filename_timestamp(report.title)\n disable_client_cache\n send_data(report.to_csv, :filename => \"#{filename}.csv\")\n end",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end",
"def index\n @companies = Company.all\n @top_billing = Company.ordered_by_current_bill\n\n @import = Company::Import.new\n # authorize @companies\n skip_authorization\n respond_to do |format|\n format.html\n format.csv { send_data @companies.to_csv, filename: \"companies-export-#{Time.now}-inclustaff.csv\" }\n \tend\n\n end",
"def index\n\t\t@users= User.all\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.csv { send_data Importer.generate(:csv, @users), filename: \"users-#{Date.today}.csv\" }\n\t\tend\n\n\tend",
"def download\n create_agent\n login\n fetch_feed\n create_catalog\n download_catalog\n end",
"def download_handoff_report\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure_new(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv_new(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report-New.csv'\n\n \tend",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def export\n @transactions = Transaction.find_all_by_user_id(current_user.id)\n csv = \"\"\n i = 0\n @transactions.each do |trans|\n if (i==0)\n csv += trans.to_csv(true)\n else\n csv += trans.to_csv(false)\n end\n i += 1\n end\n\n respond_to do |format|\n format.csv { send_data csv }\n end\n end",
"def do_csv_search(params, download)\n s = do_search(params.merge({:limit => self.count, :offset => 0}))\n \n # any possible 'speed up' would need to be done here:\n results = s.results.map do |obj|\n obj.search_result_format\n end\n\n headers = results.first.keys\n filename = download.filename\n user = download.user\n id = download.id\n path = \"tmp/#{id}_#{user}_#{filename}\"\n \n csv_file = CSV.open(path, \"wb\") do |csv|\n csv << headers\n results.each do |r|\n csv << r.values \n end\n end\n\n Zip::File.open(\"#{path}.zip\", Zip::File::CREATE) do |zipfile|\n zipfile.add(filename, path)\n end\n\n File.delete(path) if File.exist?(path)\n\n download.update({status: 1, filename: \"#{filename}.zip\"})\n #download.created_by.notify(\"Your download '#{download.filename}' is ready.\")\n end",
"def download_data\n puts 'Downloading data ...'\n\n cmd = \"wget --output-document=#{DATA_PATH} #{SFPD_DATA}\"\n system cmd\n end",
"def export\n @donor = Donor.order(:created_at)\n respond_to do |format|\n format.html\n format.csv { send_data @donor.as_csv, filename: \"Donors Export #{Date.today}.csv\" }\n end\n end",
"def welldcsv\n @payment_run = PaymentRun.find(params[:id])\n \n paydates = ''\n paydates = @payment_run.payment_date.to_s(:local) unless @payment_run.payment_date.blank?\n \n send_data(@payment_run.welld_payment_file,:type => 'text/plain', :filename => \"payments-RUN#{@payment_run.id}-#{paydates}.csv\" )\n end",
"def index\n @pagetitle = \"Suppliers\"\n \n @companies = Company.all\n\n @path = 'suppliers'\n\n @suppliercsv = Supplier.all \n respond_to do |format|\n format.html\n format.csv { send_data @suppliercsv.to_csv }\n \n end\n\n\n end",
"def index\n # @books = Book.all.order(id: \"DESC\")\n\n respond_to do |format|\n format.html\n format.csv {send_data @books.generate_csv, filename: \"book_on_rails-#{Time.zone.now.strftime('%Y%m%d%S')}.csv\"}\n end\n end",
"def download\n if platinum_user_and_above?\n urls=SiteUrl.where(\"site is not null\")\n workbook = RubyXL::Workbook.new\n worksheet = workbook.worksheets[0]\n worksheet.sheet_name = 'urls'\n header = [\"Site\",\"Url\",\"Last Update\"]\n index = 0\n worksheet_write_row(worksheet,index,header)\n urls.each do |url|\n next if url.site.nil?\n next if url.site.empty?\n index += 1\n my_row = [url.site, url.url, url.updated_at]\n worksheet_write_row(worksheet,index, my_row)\n end\n file = \"Discovered_Urls_\" + Time.now.strftime('%m%d%Y') + \".xlsx\"\n send_data workbook.stream.string, filename: file, disposition: 'attachment'\n else\n redirect_back :fallback_location => root_path, :alert => \"Access denied.\"\n end\n end",
"def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end",
"def export_users(file)\n users = User.all\n\n csv_string = FasterCSV.generate do |csv|\n users.each do |user|\n csv << [user.email, user.first_name, user.last_name]\n end\n end\n\n send_data csv_string,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :disposition => \"attachment; filename=users.csv\"\n File.open(file,\"w\").write\n end",
"def show\n respond_to do |format|\n format.html\n format.csv { send_data @cycle.to_csv, filename: \"users-#{Date.today}.csv\" }\n end\n end",
"def index\n @trainers = Trainer.paginate(:page => params[:page]).order(email_dirigeant: :desc, crawled_for_email: :desc)\n respond_to do |format|\n format.html\n format.csv { send_data Trainer.all.to_csv}\n end\n end",
"def download_sample\n if can?(:>, \"4\")\n send_file Rails.public_path+\"/excel/Import/sample_file_part_upload.csv\", :disposition => \"attachment\"\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def perform\n SiteConfig.set_pairwise_credentials(photocracy)\n earl = Earl.find(earl_id)\n\n # make HTTP request to pairwise to get export data\n url = URI.parse(\"#{APP_CONFIG[:API_HOST]}/exports/#{export_key}\")\n req = Net::HTTP::Get.new(url.path)\n # important to trigger basic HTTP Auth on pairwise\n req[\"Accept\"] = \"text/csv\"\n req.basic_auth Question.user, Question.password\n res = Net::HTTP.start(url.host, url.port) { |http| http.request(req) }\n if res.code != \"200\"\n raise \"Export URL returned response code of #{res.code} for #{url.to_s}\"\n end\n csvdata = res.body.force_encoding('UTF-8')\n\n # for creating zlibed CSV at the end\n zoutput = Zlib::Deflate.new\n znewcsv = ''\n\n earl.munge_csv_data(csvdata, type).each do |row|\n znewcsv << zoutput.deflate(row)\n end\n znewcsv << zoutput.finish\n zoutput.close\n\n export_id = Export.connection.insert(\"INSERT INTO `exports` (`name`, `data`, `compressed`) VALUES (#{Export.connection.quote(export_key)}, #{Export.connection.quote(znewcsv)}, 1)\".force_encoding('ASCII-8BIT'))\n Delayed::Job.enqueue DestroyOldExportJob.new(export_id), 20, 3.days.from_now\n url = \"/export/#{export_key}\"\n IdeaMailer.deliver_export_data_ready(email, url, photocracy)\n\n return true\n end",
"def download\n @data = HeyDan::Helper.get_data_from_url(HeyDan.cdn + '/' + dataset_file_name)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def index\n @mailing_list = MailingList.find(params[:mailing_list_id]) \n \n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @customers }\n format.csv { \n csv_file = Customer.to_csv(current_user.id)\n send_data csv_file, :type => 'text/csv', :disposition => 'attachment;\nfilename=output.csv'\n\n \n\n }\n end\n end",
"def export(params={})\n columns = delimited_string_to_array(Settings.export.travel_fields)\n send_data Travel.export(columns), :filename => \"travel.csv\"\n end",
"def personnel_data\n users = User.all.compact.uniq\n data = generate_csv(users)\n render :template => \"shared/csv_data\", :locals => {:data => data}, :layout => false\n end",
"def download_csv dir, expect=false \n browser = @browserOperator.browser\n csv_btn = browser.button(:id, \"DownloadCsvBtn\")\n\t \n raise Watir::Exception::UnknownObjectException if !csv_btn.visible? \n \n browser.radio(:value, 'time_range').click\n today = Date.today().strftime('%Y/%m/%d')\n end_hour = Time.now()\n end_time = '%02d:00' % end_hour.hour\n start_time = '%02d:00' % (end_hour - 60*60).hour\n \n \n js_string = \"document.getElementById('StartDateInput').value = '#{today}';\n document.getElementById('StartTimeInput').value = '#{start_time}';\n document.getElementById('EndDateInput').value = '#{today}';\n document.getElementById('EndTimeInput').value = '#{end_time}';\"\n \n browser.execute_script(js_string)\n browser.button(:value, 'Submit').click\n @browserOperator.wait_loading()\n \n case_name = get_case_name() \n puts '[ReportCaseBase] %s Download CSV' % case_name\n csv_btn.click\n \n if expect\n filename = case_name.gsub(/[\\\\\\/:\\*\\?<>|\\\"]/, '') << '_(expect).csv'\n else\n filename = case_name.gsub(/[\\\\\\/:\\*\\?<>|\\\"]/, '') << '.csv'\n end \n file_path = File.join(dir, filename)\n \n PopupHandle.deal_download_dialog(file_path)\n \n waitting_download_finish(file_path)\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def export_io(dataset:)\n Dir.chdir(dir) { IO.popen(\"dat export -d #{dataset} --full\") }\n end",
"def hv_export_users(oHarvest, iDbg = 0)\n\n\tusers = oHarvest.users.all\n\n summary = Array.new()\n\n\tusers.each do |u|\n if (u.is_active == true && u.is_admin == false)\n p_user = Array.new(5)\n p_user[0] = u.first_name\n p_user[1] = u.id\n p_user[2] = u.email\n p_user[3] = u.department\n p_user[4] = u.last_name\n \n summary.push(p_user)\n end\n end\n\n summary = summary.sort { |x, y|\n x[0] <=> y[0]\n }\n\n file = get_config(\"COMMON\",\t\"CSVPath\") + get_config(\"Harvest\", \"MUsers\")\n flush_to_csv(summary, file, true)\n\nend",
"def index\n @accounts = Account.all\n respond_to do |format|\n format.html \n format.json\n format.csv { send_data @accounts.to_csv, filename: \"accounts-#{Date.today}.csv\" }\n end\n end",
"def csv_data\n sql = Utils.setup_basic_filters(SqlAssembler.new, @params.merge('site_id' => @site[:id]))\n sql.set_select 'SELECT package_name, success, title, digitool_id, islandora_pid, content_model, time_started, time_finished, bytes_ingested FROM islandora_packages'\n sql.set_order 'ORDER BY id DESC'\n return sql.execute\n end",
"def report_csv_download(report)\n\t\t\tpost= { \"token\" => @token, \"report\" => report, \"xslt\" => 'csv.xsl' }\n\n\t\t\t# Get CSV report location and build post params\n\t\t\tfilename=nessus_http_request('file/xslt', post).scan(/fileName=(.*csv)/).flatten.first\n\t\t\tpost= {\"token\" => @token, 'fileName' => filename, 'step' => 2}\n\n\t\t\t# Allow for generation time\n\t\t\tRex::ThreadSafe.sleep(0.5)\n\n\t\t\t# Get CSV report\n\t\t\tfile=nessus_http_request('file/xslt/download',post)\n\n\t\t\treturn file\n\t\tend",
"def persist \n require 'csv'\n @csv_string = CSV.generate do |csv|\n csv << Record.attribute_names\n Record.find_each do |record|\n csv << record.attributes.values\n end\n end\n return send_data(@csv_string, :filename => \"downloadTest.csv\") \n end",
"def exported_report\n #The folder where the filename points to, is actually in the ~/rails/Forester because of capistrano as\n # the Apache point to ~/rails/Forester/current symlinkfolder and capistrano updates the them. \n @filename = \"quarterly_report_#{params[:year]}_#{params[:quarter]}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n if params[:quarter] == \"1\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{(params[:year].to_i-1)}-12-31' AND delivery_date<'#{params[:year]}-04-01'\")\n else\n if params[:quarter] == \"2\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-03-31' AND delivery_date<'#{params[:year]}-07-01'\")\n else\n if params[:quarter] == \"3\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-06-30' AND delivery_date<'#{params[:year]}-10-01'\")\n else\n if params[:quarter] == \"4\" then\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-09-30' AND delivery_date<'#{(params[:year].to_i+1)}-01-01'\")\n end\n end\n end\n end\n \n #Writing to file starts with empty line.\n File.open(\"#{@file_path}#{@filename}\", 'w') do |writer|\n writer.puts(\"\\n\")\n end\n \n #From the tickets delivered in the given quarter, the job ids are gathered here\n @job_ids = @tickets.collect {|i| i.job_id }\n @jobs = Job.find(@job_ids)\n \n #To have less DB calls, all specie records are put into an instance variable\n @species = Specie.all\n \n #Goes through all the jobs, for each sums up all the mbf and tonnages and writes them into the file\n # per specie.\n @jobs.each do |k|\n @my_tickets = []\n @tickets.each do |l|\n if l.job_id == k.id\n @my_tickets.push(l)\n end\n end\n \n @amounts = []\n \n @species.length.times do\n @amounts.push([0, 0])\n end\n \n @total_pulp = 0\n \n @my_tickets.each do |i|\n i.load_details.each do |j|\n if i.wood_type == 3 || j.species_id == 0 #wood type 3 & species_id 0 == pulp\n @total_pulp = @total_pulp + j.tonnage\n next #If load is pulp, it has only one load detail so program jups to next loop\n end\n #Amounts of mbf/tonnage are summed up here per ticket according to their specie.\n @amounts[j.species_id-1][0] = @amounts[j.species_id-1][0] + j.mbfss #This and triple-n tonnage in next are helper methods. See their documentation.\n @amounts[j.species_id-1][1] = @amounts[j.species_id-1][1] + j.tonnnage\n end\n end\n \n #Finally, the values calculated above are written into the file.\n File.open(\"#{@file_path}#{@filename}\", 'a') do |writer|\n writer.puts \"Job, #{k.name}\"\n writer.puts \"Category, MBF, Tonnage\"\n @species.each do |i|\n writer.puts \"#{i.code}, #{round_to(@amounts[i.id-1][0].to_f, 2)}, #{round_to(@amounts[i.id-1][1].to_f, 2)}\"\n end\n writer.puts \"Pulp, ,#{round_to(@total_pulp.to_f, 2)}\"\n writer.puts(\"\\n\")\n end\n end\n \n #The file created is opened in 'r' (== read) mode and send to user\n @file = File.open(\"#{@file_path}#{@filename}\", 'r')\n \n send_data(@file.read, :type => \"csv\", :filename => @filename)\n end",
"def export\n headers = JSON[params[:column_array]]\n rows = JSON[params[:row_array]]\n column_names = Array.new\n headers.each do |col|\n column_names << col\n end\n csv_string = CSV.generate do |csv|\n csv << column_names\n rows.each do |row|\n csv << row\n end\n end\n\n filename = params[:file_name] + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def index\n @archives = Archive.all\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"datos-plataforma-CP.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n end",
"def index\n if current_user.admin?\n @app_schedules = AppSchedule.all\n\n respond_to do |format|\n format.html\n format.csv { send_data @app_schedules.to_csv, filename: \"appsched-#{Date.today}.csv\"}\n end\n else\n @app_schedules = current_user.app_schedules.where(user_id: current_user)\n respond_to do |format|\n format.html\n format.csv { send_data @app_schedules.to_csv, filename: \"appsched-#{Date.today}.csv\"}\n end\n end\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def csv_export(export, dir, locale)\n status = \"ready\"\n\n begin\n Dump::CSVDump.new.dump(\n export.catalog.slug,\n File.join(dir, 'csv'),\n locale,\n export.with_files\n )\n zip(dir, export.pathname)\n rescue StandardError => e\n status = \"error\"\n Rails.logger.error \"[ERROR] Catalog dump: #{e.message}\"\n end\n\n export.update(status: status)\n send_mail(export)\n end",
"def index\n\t\t@leaders=Leader.all.order(\"first_name ASC, last_name ASC\")\n\n\t respond_to do |format|\n\t format.html\n\t format.csv do\n\t filename = \"mpate-\" + params[:controller] + \"-\" + Time.now.strftime(\"%m-%e-%Y\")\n\t headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n\t headers['Content-Type'] ||= 'text/csv'\n\t end\n\t end\n\tend",
"def index\n\t\t@managers=Manager.all\n\n respond_to do |format|\n format.html\n format.csv do\n filename = \"mpate-\" + params[:controller] + \"-\" + Time.now.strftime(\"%m-%e-%Y\")\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n\tend",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def index\n @bestthesisawards = Bestthesisaward.all\n respond_to do |format|\n format.html\n format.csv { send_data @bestthesisawards.to_csv, \n filename: \"bestthesisawards-#{Date.today}.csv\" }\n end\n end",
"def main()\n request_url = \"#{$canvas_url}/api/v1/users/#{$canvas_user_id}/page_views?per_page=100&start_time=#{$start_time}&end_time=#{$end_time}\"\n method = \"get\"\n options = {}\n data = canvasApiRequest(method,request_url,options)\n compiledHash = []\n data.each do |hash|\n hashData = flattenHash(hash)\n compiledHash.push(hashData)\n end\n outputToCSV(compiledHash)\nend",
"def csv(section = 'main', q='google',date='ytd',geo='')\n trend_params = {\"graph\"=>\"all_csv\", \"sa\" => \"N\"}\n trend_params[\"q\"] = q\n trend_params[\"date\"] = date\n if !geo || geo != ''\n trend_params[\"geo\"] = geo\n end\n\n data = @client.get_content(URI.parse(@url_Export), trend_params)\n # empty to return all data\n if section == ''\n return CSV.parse(data)\n end\n # split data into sections\n segments = data.split(\"\\n\\n\\n\")\n if section == 'main'\n section = ['Week', 'Year', 'Day','Month']\n else\n section = [section]\n end\n\n for x in segments do\n if section.include? x.split(',')[0].strip\n maindata = CSV.parse(x)\n return maindata\n end\n end\n end",
"def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def index\n @databases = Database.all\n respond_to do |format|\n format.html do\n @databases = @databases.includes(:vendor).order('name ASC')\n render :table\n end\n format.csv do\n @databases = @databases.includes(\n :database_subjects,\n :resources,\n :subjects, :vendor,\n :access_plain_text,\n :access_type\n )\n .order('name ASC')\n send_data @databases.to_csv, filename: \"databases-#{Date.today}.csv\"\n end\n end\n end",
"def download_data\n view = session[:view].dup if session[:view] # Copy session view, if it exists\n options = session[:paged_view_search_options].merge(:page => nil, :per_page => nil) # Get all pages\n view.table, _attrs = view.paged_view_search(options) # Get the records\n\n view.title = _(view.title.pluralize)\n view.headers.map! { |header| _(header) }\n\n case params[:download_type]\n when 'pdf' then download_pdf(view)\n when 'text' then download_txt(view, filename_timestamp(view.title))\n when 'csv' then download_csv(view, filename_timestamp(view.title))\n end\n end",
"def index\n tasks = Task.where(user_id: session[:user_id])\n @tasks = tasks.sort_by(&:created_at).reverse\n\n #binding.pry\n\n respond_to do |format|\n format.html\n format.csv { send_data as_csv(@tasks), filename: \"tasks-#{Date.today}.csv\" }\n end\n end",
"def export_accounts\n # TODO: THERE'S DUPLICATION FROM InvoicesController in CSV setup.\n\n # These charsets are expected to be common in our users.\n charset = (request_from_a_mac? ? \"MacRoman\" : \"ISO-8859-1\")\n norm = lambda {|o| Iconv.conv(\"#{charset}//IGNORE\", \"UTF-8\", o.to_s)}\n col_sep = (request_from_windows? ? \",\" : ';') # Excel understands this one automatically\n row_sep = (request_from_windows? ? \"\\r\\n\" : \"\\n\") # in case people treat it as a text file\n\n csv_string = FasterCSV.generate(:col_sep => col_sep, :row_sep => row_sep) do |csv|\n csv << %w(Nombre Antiguedad Login Facturas Clientes).map {|h| norm.call(h)}\n # this iterator is provided by pseudo_cursors\n Account.find_each(:include => :owner) do |account|\n csv << [\n account.name,\n format_date(account.created_at.to_date),\n (format_date(account.owner.last_seen_at.to_date) rescue ''),\n account.invoices.count,\n account.customers.count\n ].map {|h| norm.call(h)}\n end\n end\n send_data(csv_string, :type => \"text/csv; charset=#{charset}\", :filename => \"accounts_#{Time.now.strftime(\"%Y%m%d\")}.csv\")\n end",
"def download_handoff_report_old\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report.csv'\n\n \tend"
] | [
"0.7141524",
"0.70957816",
"0.687725",
"0.681005",
"0.6804725",
"0.6769631",
"0.6722505",
"0.66143596",
"0.6534622",
"0.6529854",
"0.6515218",
"0.6496916",
"0.6465505",
"0.64329946",
"0.6424665",
"0.6418542",
"0.63845885",
"0.63584995",
"0.6355562",
"0.6339171",
"0.63039297",
"0.6302253",
"0.6287405",
"0.6274748",
"0.62605673",
"0.6254027",
"0.624795",
"0.62073517",
"0.6197826",
"0.61910284",
"0.61674935",
"0.6166374",
"0.61411107",
"0.61406034",
"0.61217666",
"0.6111103",
"0.61010367",
"0.6095858",
"0.60863286",
"0.60561585",
"0.60460055",
"0.60383636",
"0.6028103",
"0.602437",
"0.6020586",
"0.60181344",
"0.6016544",
"0.6015338",
"0.6012524",
"0.6012433",
"0.5986601",
"0.59587294",
"0.59575677",
"0.5957565",
"0.5955805",
"0.5953157",
"0.594844",
"0.59417224",
"0.5937849",
"0.59289724",
"0.5926155",
"0.59179443",
"0.5914243",
"0.59040296",
"0.5901063",
"0.58928293",
"0.589103",
"0.58855444",
"0.5883636",
"0.5878058",
"0.58770204",
"0.58769614",
"0.5875883",
"0.5872252",
"0.5867588",
"0.585737",
"0.58542097",
"0.58535767",
"0.5847922",
"0.58434767",
"0.5840303",
"0.58400244",
"0.5835156",
"0.5832628",
"0.5829161",
"0.58287543",
"0.5825252",
"0.5808501",
"0.580667",
"0.5806386",
"0.58059573",
"0.5804224",
"0.58036",
"0.5790238",
"0.5785891",
"0.5782192",
"0.578103",
"0.57746196",
"0.5769352",
"0.5766219"
] | 0.6429926 | 14 |
Creates a CSV file containing two tables: one with Total_Research_Spending, Total_Estimated_Research_Spending, Total_Grants_Spending, Total_Number_of_Charities, Research_Forms_Submitted and Grant_Forms_Submitted, and another containing per-category data. | def dash_data_create
  set_cat_data

  # Percentage of charities that submitted each form type
  research_percent = ((session[:research_sub].to_f / session[:num_charities].to_f) * 100).to_s + '%'
  grants_percent = ((session[:grants_sub].to_f / session[:num_charities].to_f) * 100).to_s + '%'

  # First table: overall spending and submission figures
  summary_headers = %w{Total_Research_Spending Total_Estimated_Research_Spending Total_Grants_Spending Total_Number_of_Charities Research_Forms_Submitted Grant_Forms_Submitted}
  summary_values = [session[:rex_spending], session[:rex_est_spending], session[:grant_total], session[:num_charities], research_percent, grants_percent]

  # Second table: research and grant expenditure per category (a-e)
  category_headers = %w{Category Research_Expenditure Grant_Expenditure}
  category_rows = [
    ['a', @a_research, @a_grant],
    ['b', @b_research, @b_grant],
    ['c', @c_research, @c_grant],
    ['d', @d_research, @d_grant],
    ['e', @e_research, @e_grant]
  ]

  CSV.generate(headers: true) do |csv|
    csv << summary_headers
    csv << summary_values
    csv << category_headers
    category_rows.each { |row| csv << row }
  end
end
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_summary_csv(summary_csv_path)\n puts \"creating summary csv: #{summary_csv_path}\"\n\n CSV.open(summary_csv_path, \"wb\") do |csv|\n csv << SUMMARY_CSV_HEADINGS\n end\nend",
"def to_csv()\n all = general_info_csv() + \"\\r\\n\" +\n teacher_for_csv() + \"\\r\\n\" +\n contributor_to_csv() + \"\\r\\n\" +\n published_in_csv() + \"\\r\\n\" +\n text_fields_csv()\n all\n end",
"def build_csv()\n\t\tconcat_path = \"#{@location}/#{@name}.csv\"\n\n\t\tCSV.open(concat_path, 'wb') do |csv_line|\n\t\t\t\t\t\n\t\t\theaders = ['movie_date', 'title', 'lifetime_gross_sales']\n\t\t\tcsv_line << headers\n\n\t\t\t@cage.each do |cage|\n\t\t\t\tcsv_line << [cage[:movie_date], cage[:title], cage[:life_time_gross]]\n\t\t\tend\n\t\tend\n\tend",
"def organise_data\n # Single big csv file where we want to aggregate all data\n outfile = File.open('energy-consumption.csv', 'w')\n\n # Columns we want to have in the output file:\n # * category = {commercial, residential}\n # * type = {restaurant, hospital, retail, etc.}\n # * name = name of the building (generally just the city where it is located)\n # * date = date of the year\n # * time = time of the day (hour by hour)\n # * energy = measured energy consuption for that date and hour\n outfile.puts('category,type,name,date,time,energy')\n \n rw_commercial_buildings_data(outfile)\n rw_residential_buildings_data(outfile)\n \n outfile.close\nend",
"def write_csv\n # header = \"name, description\"\n CSV.open(@csv_file, 'wb') do |csv|\n # csv << header\n @recipes.each do |row|\n csv << [row.name, row.description, row.cooking_time, row.difficulty, row.tested]\n end\n end\n end",
"def create_csv\n CSV.open('./data/slcsp.csv', 'wb') do |csv|\n csv << ['zipcode', 'rate']\n end\n build_csv\n end",
"def save\n csv = [] # store CSV lines to write to file\n # Iterate over BasicFoods and retrieve CSV format for each \n @basic_foods.each do |key, basic_food|\n csv.push(basic_food.csv)\n end\n # Iterate over Recipes and retrieve CSV format for each\n @recipes.each do |key, recipe|\n csv.push(recipe.csv)\n end\n File.write('FoodDB.txt', csv.join(\"\\n\")) # Write CSV lines to file\n end",
"def export_the_thing\n #First, the file name, path and table headers will be set according to given id \n if params[:id] == \"1\"\n @jobs = Job.all\n \n #Same filepath thingy here as above\n @filename = \"Jobs_on_#{Time.now.strftime(\"%Y-%m-%d_%H:%M:%S\")}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n @table_name = \"Jobs\"\n @table_headers = \"Name, Owner Name, Logger Name, Trucker Name, HFI-rate (%), HFI-prime\" \n end\n \n if params[:id] == \"2\"\n @tickets = Ticket.all\n \n @filename = \"Tickets_on_#{Time.now.strftime(\"%Y-%m-%d_%H:%M:%S\")}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n @table_name = \"Tickets\"\n \n @species = \"\"\n Specie.all.each do |i|\n @species = \"#{@species}, #{i.code}\"\n end\n \n @table_headers = \"Number, Delivery Date, Destination Name, Job Name, Wood Type#{@species}, Tonnage, Net MBF, Load Pay, Logger Pay, Trucker Pay, HFI Pay, Owner Pay\" \n end\n if params[:id] == \"3\"\n @payments = PaymentFromDestination.all\n \n @filename = \"Payments_on_#{Time.now.strftime(\"%Y-%m-%d_%H:%M:%S\")}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n @table_name = \"Payments\"\n @table_headers = \"Date, Destination Name, Job Name, Payment #, Wood Type, Net MBF, Tonnage, Total Payment\" \n end\n if params[:id] == \"4\"\n @receipts = Receipt.all\n \n @filename = \"Receipts_on_#{Time.now.strftime(\"%Y-%m-%d_%H:%M:%S\")}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n @table_name = \"Receipts\"\n @table_headers = \"Date, Job Name, Owner Type, Owner Name, Payment #, # of Tickets, Total Payment\" \n end\n\n \n #Then, file is created with name and path set above and the headers are written to the file\n #After writing the headers, the data according to given id is written.\n File.open(\"#{@file_path}#{@filename}\", \"w\") do |writer|\n writer.puts @table_name\n writer.puts @table_headers\n \n if params[:id] == \"1\"\n @jobs.each do |i|\n @puts = \"#{i.name}, #{i.owner.name.gsub(',', '')}, #{i.logger.name.gsub(',', '')}, #{i.trucker.name.gsub(',', '')}, #{i.hfi_rate}, #{i.hfi_prime}\"\n writer.puts @puts\n end\n end\n \n if params[:id] == \"2\"\n @tickets.each do |i|\n @amounts = []\n \n Specie.all.each do\n @amounts.push(0)\n end\n \n i.load_details.each do |j|\n unless j.mbfs.nil?\n @amounts[j.species_id-1] = j.mbfs\n else\n @amounts[j.species_id-1] = 0\n end\n end\n \n @amounts_str = \"\"\n @amounts.each do |j|\n @amounts_str = \"#{@amounts_str}, #{j}\"\n end\n \n if i.logger_rate.nil? 
\n i.logger_value = 0\n else\n \n if i.logger_rate.rate_type == \"MBF\"\n i.logger_value = i.net_mbf * i.logger_rate.rate\n i.logger_value = i.logger_value.round(2)\n end\n if i.logger_rate.rate_type == \"Tonnage\"\n i.logger_value = i.tonnage * i.logger_rate.rate\n i.logger_value = i.logger_value.round(2)\n end\n if i.logger_rate.rate_type == \"percent\"\n i.logger_value = i.value * (i.logger_rate.rate/100)\n i.logger_value = i.logger_value.round(2)\n end\n end\n \n if i.trucker_rate.nil?\n i.trucker_value = 0\n else\n if i.trucker_rate.rate_type == \"MBF\"\n i.trucker_value = round_to(i.trucker_rate.rate*i.net_mbf, 2)\n else\n if i.trucker_rate.rate_type == \"Tonnage\"\n i.trucker_value = round_to(i.trucker_rate.rate*i.tonnage, 2)\n else\n i.trucker_value = round_to(i.trucker_rate.rate/100*i.value, 2)\n end\n end\n end\n \n if i.job.hfi_rate.nil?\n i.hfi_value = 0\n else\n i.hfi_value = (i.job.hfi_rate/100)*i.value\n end\n \n i.owner_value = i.value - i.hfi_value - i.logger_value - i.trucker_value\n \n @puts = \"#{i.number}, #{i.delivery_date}, #{i.destination.name.gsub(',', '')}, #{i.job.name.gsub(',', '')}, \"\n @puts << \"#{WoodType.find(i.wood_type).name}#{@amounts_str}, #{i.tonnage}, #{i.net_mbf}, #{give_pennies(i.value).gsub(',', '')}, #{give_pennies(i.logger_value).gsub(',', '')}, #{give_pennies(i.trucker_value).gsub(',', '')}, #{give_pennies(i.hfi_value).gsub(',', '')}, #{give_pennies(i.owner_value).gsub(',', '')}\"\n writer.puts @puts\n end\n end\n \n if params[:id] == \"3\"\n @payments.each do |i|\n @puts = \"#{i.payment_date}, #{i.destination.name.gsub(',', '')}, #{i.job.name.gsub(',', '')}, #{i.payment_num}, \"\n @puts << \"#{WoodType.find(i.wood_type).name}, #{i.tonnage}, #{i.net_mbf}, \"\n @puts << \"#{give_pennies(i.total_payment).gsub(',', '')}\"\n writer.puts @puts\n end\n end\n \n if params[:id] == \"4\"\n @receipts.each do |i|\n if i.owner_type == \"owner\"\n @puts = \"#{i.receipt_date}, #{i.job.name.gsub(',', '')}, #{i.owner_type}, #{Owner.find(i.owner_id).name.gsub(',', '')}, #{i.payment_num}, \"\n @puts << \"#{i.tickets.count}, #{i.total_payment.to_s}\"\n else\n if i.owner_type == \"hfi\"\n @puts = \"#{i.receipt_date}, #{i.job.name.gsub(',', '')}, #{i.owner_type}, Halme Forestry Inc, #{i.payment_num}, \"\n @puts << \"#{i.tickets.count}, #{i.total_payment.to_s}\"\n else \n @puts = \"#{i.receipt_date}, #{i.job.name.gsub(',', '')}, #{i.owner_type}, #{Partner.find(i.owner_id).name.gsub(',', '')}, #{i.payment_num}, \"\n @puts << \"#{i.tickets.count}, #{i.total_payment.to_s}\"\n end\n end\n writer.puts @puts \n end\n end\n \n end\n \n #Written file is opened for sending and sent\n @file = File.open(\"#{@file_path}#{@filename}\", \"r\")\n \n send_data(@file.read, :type => \"csv\", :filename => @filename)\n end",
"def output_csv(datastore)\n CSV.open(OUTPUT_CSV, \"wb\") do |csv|\n csv << \t[\"Period\", \"First Use In\", \"Engaged Users\"]\n\n datastore.keys.sort.each do |date_key|\n pretty_date = Date.parse(date_key.split(\"_\")[0]).strftime(\"%Y-%m-%d\")\n datastore[date_key].keys.sort.each do |cohort_key|\n pretty_cohort= Date.parse(cohort_key.split(\"_\")[0]).strftime(\"%Y-%m-%d\")\n csv << [ pretty_date, pretty_cohort,\tdatastore[date_key][cohort_key]['total_users']]\n end\n end\n end\nend",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def create_csv()\n hashes = Transaction.all_as_hashes\n CSV.generate do |csv|\n # Adds the keys as headings on the first line\n csv << hashes.first.keys\n # Iterates through the transactions and populates CSV\n hashes.each do |hash|\n csv << hash.values\n end\n end\n end",
"def generate_csv\n csv_string = CSV.generate do |csv|\n csv << [\"Sl no.\",\"Scheme\",\"IP no.\", \"Patient name\", \"UHID no.\", \"Reg no.\", \"DOA\", \"DOS\", \"DOD\", \"Plan of treatment\", \"Approved Amount\",\"Claim Amount\", \"TDS\",\"Net Amount\",\"Hospital charges\",\"Medicine charges\",\"Implant charges\"]\n sl_no = 1\n @claims.each do |claim|\n csv <<[sl_no,\"#{claim.scheme rescue \"\"}\",\"#{claim.ip_no rescue \"\"}\",\"#{claim.patient_name rescue \"\"}\",\"#{claim.uhid_no rescue \"\"}\",\"#{claim.reg_no rescue \"\"}\",\"#{claim.date_of_admission}\",\"#{claim.date_of_surgery}\",\"#{claim.date_of_discharge}\",\"#{claim.plan_of_treatment rescue \"\"}\",(claim.approved_amount rescue 0.0),(claim.claim_amount rescue 0.0),(claim.tds_amount rescue 0.0),(claim.net_amount rescue 0.0),(claim.hospital_charge rescue 0.0),(claim.medicine_charge rescue 0.0),(claim.implant_charge rescue 0.0)]\n sl_no +=1\n end\n end\n csv_string\n end",
"def create_csv\n title = [\"Typed Word\",\"Suggestion\", \"Score\", \"Frequency\", \"ED?\", \"Count ED\", \"Year\"]\n CSV.open('output.csv', 'a') do |csv|\n csv << title\n end\n end",
"def save_csv\n CSV.open(@csv_file_path, \"wb\") do |csv|\n csv << [\"name\", \"description\", \"rating\", \"prep_time\", \"done\"]\n @recipes.each do |recipe|\n csv << [recipe.name, recipe.description, recipe.rating, recipe.prep_time, recipe.done?]\n end\n end\n end",
"def csv_filler(filing_week,hours,name,hours_total,wage,info,comments,expenses)\n CSV.open(\"#{name}\" + '_' + \"#{filing_week}.csv\", \"wb\") do |csv|\n csv << [\"Name\",\"Week\",\"Hourly Wage\",\"Client\",\"Repo\",\"Commits\",\"Type\",\"Mon\",\"Tue\",\"Wed\",\"Thurs\",\"Fri\",\"Sat\",\"Sun\",\"Total\"]\n csv << [\"#{name}\", \"#{filing_week}\", \"#{wage}\",\"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\"]\n p info\n info.each_pair do |key, value|\n p value\n value.each_pair do |repo, commits|\n commits.each_pair do |date, details|\n client_hours_total = 0\n client_hours = hours[key][0]\n client_hours.each do |day_hours|\n client_hours_total = client_hours_total + day_hours\n end\n if details.class == Array\n details.each do |d|\n csv << [\"\",\"\",\"\",\"#{key}\",\"#{repo}\",\"#{d[\"Message\"]} \\n #{d[\"SHA\"]}\",\"Commits\",\"#{client_hours[0]}\",\"#{client_hours[1]}\",\"#{client_hours[2]}\",\"#{client_hours[3]}\",\"#{client_hours[4]}\",\"#{client_hours[5]}\",\"#{client_hours[6]}\",\"#{client_hours_total}\"]\n end\n else\n csv << [\"\",\"\",\"\",\"#{key}\",\"#{repo}\",\"#{details}\",\"Commits\",\"#{client_hours[0]}\",\"#{client_hours[1]}\",\"#{client_hours[2]}\",\"#{client_hours[3]}\",\"#{client_hours[4]}\",\"#{client_hours[5]}\",\"#{client_hours[6]}\",\"#{client_hours_total}\"]\n end\n end\n end\n end\n week_hours_total = 0\n week_hours = hours_total.values[0]\n wage_number = wage.to_i\n week_hours.each do |week_hours|\n week_hours_total = week_hours_total + week_hours\n end\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"Total Hours\",\"#{week_hours[0]}\",\"#{week_hours[1]}\",\"#{week_hours[2]}\",\"#{week_hours[3]}\",\"#{week_hours[4]}\",\"#{week_hours[5]}\",\"#{week_hours[6]}\",\"#{week_hours_total}\"]\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"Total Services\",\"\",\"\",\"\",\"\",\"\",\"\",\"\",\"#{week_hours_total * wage_number}\"]\n total_general = 0\n expenses.each_with_index do |expense_type, index|\n if index != 2\n expense_total = 0\n expense_total = expense_total + expense_type[0].to_i\n expense_total = expense_total + expense_type[1].to_i\n expense_total = expense_total + expense_type[2].to_i\n expense_total = expense_total + expense_type[3].to_i\n expense_total = expense_total + expense_type[4].to_i\n expense_total = expense_total + expense_type[5].to_i\n expense_total = expense_total + expense_type[6].to_i\n total_general = total_general + expense_total\n end\n if index == 0\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"Expenses(General)\",\"#{expense_type[0]}\",\"#{expense_type[1]}\",\"#{expense_type[2]}\",\"#{expense_type[3]}\",\"#{expense_type[4]}\",\"#{expense_type[5]}\",\"#{expense_type[6]}\", \"#{expense_total}\"]\n elsif index == 1\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"Expenses(Milage)\",\"#{expense_type[0]}\",\"#{expense_type[1]}\",\"#{expense_type[2]}\",\"#{expense_type[3]}\",\"#{expense_type[4]}\",\"#{expense_type[5]}\",\"#{expense_type[6]}\", \"#{expense_total}\"]\n elsif index == 2\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"Milage Description\",\"#{expense_type[0]}\",\"#{expense_type[1]}\",\"#{expense_type[2]}\",\"#{expense_type[3]}\",\"#{expense_type[4]}\",\"#{expense_type[5]}\",\"#{expense_type[6]}\", \"\"]\n elsif index == 3\n csv << ['',\"\",\"\",\"\",\"\",\"\",\"Expenses(Tolls)\",\"#{expense_type[0]}\",\"#{expense_type[1]}\",\"#{expense_type[2]}\",\"#{expense_type[3]}\",\"#{expense_type[4]}\",\"#{expense_type[5]}\",\"#{expense_type[6]}\", \"#{expense_total}\"]\n elsif index == 4\n csv << 
[\"\",\"\",\"\",\"\",\"\",\"\",\"Expenses(Other)\",\"#{expense_type[0]}\",\"#{expense_type[1]}\",\"#{expense_type[2]}\",\"#{expense_type[3]}\",\"#{expense_type[4]}\",\"#{expense_type[5]}\",\"#{expense_type[6]}\", \"#{expense_total}\"]\n end\n end\n wage_hours = week_hours_total * wage.to_i\n p wage_hours\n p total_general\n p week_hours_total\n p wage.to_i\n # p total_general\n total_total = wage_hours + total_general\n p total_total\n csv << [\"\",\"\",\"\",\"\",\"\",\"\",\"TOTAL\",\"\",\"\",\"\",\"\",\"\",\"\",\"\",\"#{total_total}\"]\n end\nend",
"def donation_report_csv\n if date_format_check\n\n filter_by_account, account_id = account_filter\n\n csv_string = FasterCSV.generate do |csv|\n\n if filter_by_account\n filter_conditions = \"AND finance_transaction_receipt_records.fee_account_id #{account_id == nil ? 'IS' : '='} ?\"\n filter_values = [account_id]\n else\n filter_conditions = \"\"\n filter_values = []\n end\n\n category_id = FinanceTransactionCategory.find_by_name(\"Donation\").id\n @donations = FinanceDonation.all(:joins => {:transaction => :transaction_receipt},\n :order => 'finance_transactions.transaction_date desc',\n :conditions => [\"(finance_transactions.transaction_date BETWEEN ? AND ?) AND category_id = ?\n #{filter_conditions}\", @start_date, @end_date, category_id] + filter_values,\n :select => \"finance_donations.*, CONCAT(IFNULL(transaction_receipts.receipt_sequence, ''),\n transaction_receipts.receipt_number) AS receipt_no,\n finance_transactions.voucher_no\")\n\n csv << [t('donations')]\n csv << [t('start_date'), format_date(@start_date)]\n csv << [t('end_date'), format_date(@end_date)]\n csv << [t('fee_account_text'), \"#{@account_name}\"] if @accounts_enabled\n csv << \"\"\n cols = [t('donor'), ('amount'), t('receipt_or_voucher_no'), t('date_text')]\n csv << cols\n\n total = 0\n\n @donations.each do |d|\n csv << [d.donor, precision_label(d.amount.to_f), d.receipt_no.nil? ? d.voucher_no : d.receipt_no,\n format_date(d.transaction_date)]\n total += d.amount.to_f\n end\n\n csv << [t('net_income'), precision_label(total)]\n end\n\n filename = \"#{t('donations')}-#{format_date(@start_date)}-#{format_date(@end_date)}.csv\"\n send_data(csv_string, :type => 'text/csv; charset=utf-8; header=present', :filename => filename)\n end\n end",
"def save_to_csv\n csv_options = { col_sep: ',', quote_char: '\"' }\n CSV.open(@file_path, 'wb', csv_options) do |csv_row|\n # To store recipes, we loop over cookbook recipes array (see initializer)\n @recipes.each do |recipe|\n # CSV SHOULD NOT TAKE INSTANCES\n # We must individually separate the name and description from the instances\n # To then store them in array format into each row of the CSV\n # One row per recipe\n csv_row << [recipe.name, recipe.description]\n end\n end\n end",
"def to_csv\n initialize_generator\n csv_report_generator.records = pull_join\n csv_report_generator.generate_report\n end",
"def create\n selected_courses = []\n account_id = Account.find_by_name('Simon Fraser University').id\n username = params[:course_for]\n cross_list = params[:cross_list]\n params.each do |key, value|\n if key.to_s.starts_with? \"selected_course\"\n selected_courses.push value\n end\n end\n\n @course_csv = []\n @section_csv = []\n @enrollment_csv = []\n selected_courses.each do |course|\n # 20131:::ensc:::351:::d100:::Real Time and Embedded Systems\n course_info = course.split(\":::\")\n term = course_info[0]\n name = course_info[1]\n number = course_info[2]\n section = course_info[3]\n title = course_info[4]\n\n course_id = \"#{term}-#{name}-#{number}-#{section}:::course\"\n section_id = \"#{term}-#{name}-#{number}-#{section}:::section\"\n short_name = \"#{name.upcase}#{number} #{section.upcase}\"\n long_name = \"#{short_name} #{title}\"\n\n @course_csv.push \"#{course_id},#{short_name},#{long_name},#{account_id},active\"\n @section_csv.push \"#{section_id},#{course_id},#{section.upcase},active,,,\"\n @enrollment_csv.push \"#{course_id},#{username},teacher,#{section_id},active\"\n\n end\n\n end",
"def generate_csv\n\n fields = @resource.typus_fields_for(:csv)\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = Rails.root.join(\"tmp\", \"export-#{@resource.to_resource}-#{Time.now.utc.to_s(:number)}.csv\")\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields.keys\n @resource.find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map do |key, value|\n case value\n when :transversal\n a, b = key.split(\".\")\n record.send(a).send(b)\n when :belongs_to\n record.send(key).to_label\n else\n record.send(key)\n end\n end\n end\n end\n end\n\n send_file filename\n\n end",
"def dumpCsvSummary()\n CSV.open(csvFilenameSummary(), \"w\", { :col_sep => csvColSepChar() }) { |csv|\n csv << AnalyzerExpA.genCsvTitleRow() ;\n genCsvSummary().each{|row|\n csv << row ;\n }\n }\n end",
"def exported_report\n #The folder where the filename points to, is actually in the ~/rails/Forester because of capistrano as\n # the Apache point to ~/rails/Forester/current symlinkfolder and capistrano updates the them. \n @filename = \"quarterly_report_#{params[:year]}_#{params[:quarter]}.csv\"\n @file_path = \"#{Rails.root}/../../shared/system/exports/\"\n if params[:quarter] == \"1\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{(params[:year].to_i-1)}-12-31' AND delivery_date<'#{params[:year]}-04-01'\")\n else\n if params[:quarter] == \"2\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-03-31' AND delivery_date<'#{params[:year]}-07-01'\")\n else\n if params[:quarter] == \"3\"\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-06-30' AND delivery_date<'#{params[:year]}-10-01'\")\n else\n if params[:quarter] == \"4\" then\n @tickets = Ticket.find(:all, :conditions => \"delivery_date>'#{params[:year]}-09-30' AND delivery_date<'#{(params[:year].to_i+1)}-01-01'\")\n end\n end\n end\n end\n \n #Writing to file starts with empty line.\n File.open(\"#{@file_path}#{@filename}\", 'w') do |writer|\n writer.puts(\"\\n\")\n end\n \n #From the tickets delivered in the given quarter, the job ids are gathered here\n @job_ids = @tickets.collect {|i| i.job_id }\n @jobs = Job.find(@job_ids)\n \n #To have less DB calls, all specie records are put into an instance variable\n @species = Specie.all\n \n #Goes through all the jobs, for each sums up all the mbf and tonnages and writes them into the file\n # per specie.\n @jobs.each do |k|\n @my_tickets = []\n @tickets.each do |l|\n if l.job_id == k.id\n @my_tickets.push(l)\n end\n end\n \n @amounts = []\n \n @species.length.times do\n @amounts.push([0, 0])\n end\n \n @total_pulp = 0\n \n @my_tickets.each do |i|\n i.load_details.each do |j|\n if i.wood_type == 3 || j.species_id == 0 #wood type 3 & species_id 0 == pulp\n @total_pulp = @total_pulp + j.tonnage\n next #If load is pulp, it has only one load detail so program jups to next loop\n end\n #Amounts of mbf/tonnage are summed up here per ticket according to their specie.\n @amounts[j.species_id-1][0] = @amounts[j.species_id-1][0] + j.mbfss #This and triple-n tonnage in next are helper methods. See their documentation.\n @amounts[j.species_id-1][1] = @amounts[j.species_id-1][1] + j.tonnnage\n end\n end\n \n #Finally, the values calculated above are written into the file.\n File.open(\"#{@file_path}#{@filename}\", 'a') do |writer|\n writer.puts \"Job, #{k.name}\"\n writer.puts \"Category, MBF, Tonnage\"\n @species.each do |i|\n writer.puts \"#{i.code}, #{round_to(@amounts[i.id-1][0].to_f, 2)}, #{round_to(@amounts[i.id-1][1].to_f, 2)}\"\n end\n writer.puts \"Pulp, ,#{round_to(@total_pulp.to_f, 2)}\"\n writer.puts(\"\\n\")\n end\n end\n \n #The file created is opened in 'r' (== read) mode and send to user\n @file = File.open(\"#{@file_path}#{@filename}\", 'r')\n \n send_data(@file.read, :type => \"csv\", :filename => @filename)\n end",
"def summary_csv(user)\n return '' unless user.admin?\n\n if user.admin?\n groupings = self.groupings\n .includes(:group,\n :accepted_students,\n current_result: :marks)\n else\n groupings = self.groupings\n .includes(:group,\n :accepted_students,\n current_result: :marks)\n .joins(:memberships)\n .where('memberships.user_id': user.id)\n end\n\n headers = [['User name', 'Group', 'Final grade'], ['', 'Out of', self.max_mark]]\n self.ta_criteria.each do |crit|\n headers[0] << (crit.bonus? ? \"#{crit.name} (#{Criterion.human_attribute_name(:bonus)})\" : crit.name)\n headers[1] << crit.max_mark\n end\n headers[0] << 'Bonus/Deductions'\n headers[1] << ''\n\n result_ids = groupings.pluck('results.id').uniq.compact\n extra_marks_hash = Result.get_total_extra_marks(result_ids, max_mark: max_mark)\n CSV.generate do |csv|\n csv << headers[0]\n csv << headers[1]\n\n groupings.each do |g|\n result = g.current_result\n marks = result.nil? ? {} : result.mark_hash\n g.accepted_students.each do |s|\n row = [s.user_name, g.group.group_name]\n if result.nil?\n row += Array.new(2 + self.ta_criteria.count, nil)\n else\n row << result.total_mark\n row += self.ta_criteria.map { |crit| marks[crit.id] }\n row << extra_marks_hash[result&.id]\n end\n csv << row\n end\n end\n end\n end",
"def as_csv(*)\n populate_carriers!\n\n CSV.generate do |csv|\n csv << [\n 'key', 'primary_co2_emission',\n *primary_carriers.map { |c| \"primary_demand_of_#{c} (MJ)\" },\n *final_carriers.map { |c| \"final_demand_of_#{c} (MJ)\" }\n ]\n\n @graph.group_nodes(:application_group).each do |node|\n csv << node_row(node)\n end\n end\n end",
"def create_finals_file\n file = File.open('final.csv', 'w')\n $students.each do |student|\n name = student[:name]\n avg = get_avggrades(student)\n file.puts \"#{name} #{avg}\"\n end\n file.close\nend",
"def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end",
"def export_csv\n export_string = \"#{@id},#{type_string},#{@name.gsub(/[\\,,\\s]/,\"\")},\"\n @details.each{|k,v| export_string << \"#{k}=#{v};\".gsub(/[\\,,\\s]/,\"\") }\n export_string\n end",
"def export_csv\n correct\n CSV.open('output.csv', 'a') do |csv|\n @suggestion.each_with_index{|(name,score), index|\n word = @typed_name.capitalize #typed name\n sugges = name.capitalize #suggestion name\n score = (@suggestion[name].round(3)).to_s #score of this suggestion\n years = @year #which years are inserted to dictionary\n ed = \"NA\" #which ED this name belongs to\n ed_count = \"NA\" #how many candidates fall in ED of this name\n\n # Check frequency for the words in suggestion list\n # Typed word, not in the dictionary has zero frequency\n if @working_dictionary.has_key?(name)\n freq = (@working_dictionary[name]).to_s \n else\n freq = 0\n end\n \n # Check which edit distance the word belongs to.\n\n # Check if the word is from @ed1\n if !@ed1.empty?\n if @ed1.include?(name)\n ed = 1.to_s\n ed_count = @count_ed1.to_s\n end\n end\n\n # Check if the word is from @ed2\n if !@ed2.empty?\n if @ed2.include?(name)\n ed = 2.to_s\n ed_count = @count_ed2.to_s\n end \n end\n\n # Build a string to insert in csv file\n if index == 0 # years are displayed only in first line for this @typed_name\n line = [word, sugges, score, freq, ed, ed_count, year] \n else\n line = [word, sugges, score, freq, ed, ed_count] \n end\n\n csv << line\n }\n csv << [] # insert blank line as last line\n end\n end",
"def generate_CSV\n CSV.open(\"./veterans.csv\", \"w\", \n write_headers: true,\n headers: [\"login\", \"name\", \"location\", \"repo count\"]\n ) do |csv|\n @top_ten.each { |row| csv << row }\n end\n end",
"def period_csv\n CSV.generate do |csv|\n report_details.each { |a| csv.add_row(a) }\n csv.add_row [] # Blank row\n time_period_table.each { |a| csv.add_row(a) }\n end\n end",
"def csv_report\n tire_cols = params[:tire] || {}\n ar_cols = params[:activerecord] || {}\n assocs_to_include = params[:assoc][:include] || {}\n params[:assoc][:max] ||= {}\n klass = model_class.constantize\n @filename = \"#{model_class.humanize}.csv\"\n\n response.headers['Content-Type'] ||= 'text/plain'\n response.headers['Content-Disposition'] = \"attachment; filename=#{@filename}\"\n response.headers['Content-Transfer-Encoding'] = 'binary'\n response.headers['Last-Modified'] = Time.now.to_s\n\n # Right, try to define a header:\n header = []\n tire_cols.keys.each { |x| header.push(x.humanize) }\n ar_cols.keys.each { |x| header.push(x.humanize) }\n assocs_to_include.keys.each do |assoc|\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n header.push params[:assoc][assoc.to_sym].keys.first\n elsif params[:assoc][:max][assoc] # has_many\n (1 .. (params[:assoc][:max][assoc].to_i)).each do |i|\n params[:assoc][assoc.to_sym].keys.each do |k|\n header.push(\"#{assoc.singularize.humanize} #{i} #{k.humanize}\")\n end\n end\n else # has_a\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n header.push \"#{assoc.humanize} #{k.humanize}\"\n end\n end\n end\n\n results = klass.search({ per: TireSearch::INFINITY }, 1, '')\n self.response_body = Enumerator.new do |y|\n results.each_with_index do |result, i|\n y << header.to_csv if i == 0\n\n line = []\n tire_cols.keys.each { |x| line.push(result[x]) }\n\n result = result.load if ar_cols.count > 0 || assocs_to_include.keys.count > 0\n\n ar_cols.keys.each { |x| line.push(result.send(x)) } if ar_cols.count > 0\n\n assocs_to_include.keys.each do |assoc|\n related = result.send(assoc)\n if params[:assoc][:max][assoc] == 'join' # Is a has_many with only one real column\n col = params[:assoc][assoc.to_sym].keys.first\n line.push related.map { |x| x.send(col) }.join(' // ')\n elsif params[:assoc][:max][assoc]\n (0 .. (params[:assoc][:max][assoc].to_i - 1)).each do |j|\n params[:assoc][assoc.to_sym].keys.each do |k|\n line.push(related[j] ? related[j].send(k) : nil)\n end\n end\n else\n params[:assoc][assoc.to_sym].keys.each do |k| # Each key requested from the associated record\n line.push related ? related.send(k) : nil\n end\n end\n end\n y << line.to_csv\n GC.start if i % 500 == 0\n end\n end\n end",
"def csv_write (filename = \"students.csv\")\n CSV.open(filename, \"w\") do |row|\n @students.each do |student|\n row << [student[:name], student[:cohort], student[:food]]\n end\n end\nend",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def generateCSV()\n findCommits\n findLinesOfCode\n\n CSV.open(\"data.csv\", \"wb\") {|csv| @students.to_a.each {|elem| csv << elem} }\nend",
"def get_csv\n CSV.generate do |csv|\n csv << ['Name','Student ID','User ID','Role','Email Address','Sections']\n get_feed[:students].each do |student|\n name = student[:last_name] + ', ' + student[:first_name]\n user_id = student[:login_id]\n student_id = student[:student_id]\n email_address = student[:email]\n role = ENROLL_STATUS_TO_CSV_ROLE[student[:enroll_status]]\n sections = sections_to_name_string(student[:sections])\n csv << [name, student_id, user_id, role, email_address, sections]\n end\n end\n end",
"def writeCSV(file_name)\n generated_array = transaction(file_name)\n array_from_csv = read_file(file_name)\n final_hash = merge_arrays(array_from_csv, generated_array)\n binding.pry\n\n CSV.open(\"results.csv\", \"w\") do |row|\n final_hash.each do |nested_hash|\n row << [nested_hash[\"SKU\"], nested_hash[\"quantity\"]]\n end #end each nested_hash loop\n end #end csv loop\nend",
"def to_a11_submissions_csv(start_date:, end_date:)\n non_flagged_submissions = self.submissions.non_flagged.where(\"created_at >= ?\", start_date).where(\"created_at <= ?\", end_date)\n return nil unless non_flagged_submissions.present?\n\n header_attributes = [\n \"standardized_question_number\",\n \"standardized_question_identifier\",\n \"customized_question_text\",\n \"likert_scale_1\",\n \"likert_scale_2\",\n \"likert_scale_3\",\n \"likert_scale_4\",\n \"likert_scale_5\",\n \"response_volume\",\n \"notes\",\n \"start_date\",\n \"end_date\"\n ]\n\n @hash = {\n answer_01: Hash.new(0),\n answer_02: Hash.new(0),\n answer_03: Hash.new(0),\n answer_04: Hash.new(0),\n answer_05: Hash.new(0),\n answer_06: Hash.new(0),\n answer_07: Hash.new(0)\n }\n\n # Aggregate likert scale responses\n non_flagged_submissions.each do |submission|\n @hash.keys.each do |field|\n response = submission.send(field)\n if response.present?\n @hash[field][submission.send(field)] += 1\n end\n end\n end\n\n # TODO: Needs work\n CSV.generate(headers: true) do |csv|\n csv << header_attributes\n\n @hash.each_pair do |key, values|\n @question_text = \"123\"\n if key == :answer_01\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 1\n elsif key == :answer_02\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 2\n elsif key == :answer_03\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 3\n elsif key == :answer_04\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 4\n elsif key == :answer_05\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 5\n elsif key == :answer_06\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 6\n elsif key == :answer_07\n question = questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 7\n end\n\n csv << [\n standardized_question_number,\n key,\n @question_text,\n values[\"1\"],\n values[\"2\"],\n values[\"3\"],\n values[\"4\"],\n values[\"5\"],\n response_volume,\n \"\", # Empty field for the user to enter their own notes\n start_date,\n end_date\n ]\n end\n\n end\n end",
"def save_csv\n CSV.open(@csv_file_path, 'wb') do |csv|\n @recipes.each do |recipe|\n csv << [recipe.name, recipe.description, recipe.rating, recipe.prep_time, recipe.tried]\n end\n end\n end",
"def initialize_csv\n CSV.open(\"results.csv\", \"wb\") do |csv|\n csv << [\"class\", \"title of course\", \"credits\"]\n end\nend",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def to_a11_submissions_csv(start_date:, end_date:)\n non_flagged_submissions = self.submissions.non_flagged\n return nil unless non_flagged_submissions.present?\n\n header_attributes = [\n \"standardized_question_number\",\n \"standardized_question_identifier\",\n \"customized_question_text\",\n \"likert_scale_1\",\n \"likert_scale_2\",\n \"likert_scale_3\",\n \"likert_scale_4\",\n \"likert_scale_5\",\n \"response_volume\",\n \"notes\",\n \"start_date\",\n \"end_date\"\n ]\n\n @hash = {\n answer_01: Hash.new(0),\n answer_02: Hash.new(0),\n answer_03: Hash.new(0),\n answer_04: Hash.new(0),\n answer_05: Hash.new(0),\n answer_06: Hash.new(0),\n answer_07: Hash.new(0)\n }\n\n # Aggregate likert scale responses\n non_flagged_submissions.each do |submission|\n @hash.keys.each do |field|\n response = submission.send(field)\n if response.present?\n @hash[field][submission.send(field)] += 1\n end\n end\n end\n\n # TODO: Needs work\n CSV.generate(headers: true) do |csv|\n csv << header_attributes\n\n @hash.each_pair do |key, values|\n @question_text = \"123\"\n if key == :answer_01\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 1\n elsif key == :answer_02\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 2\n elsif key == :answer_03\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 3\n elsif key == :answer_04\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 4\n elsif key == :answer_05\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 5\n elsif key == :answer_06\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 6\n elsif key == :answer_07\n question = form.questions.where(answer_field: key).first\n response_volume = values.values.collect { |v| v.to_i }.sum\n @question_text = question.text\n standardized_question_number = 7\n end\n\n csv << [\n standardized_question_number,\n key,\n @question_text,\n values[\"1\"],\n values[\"2\"],\n values[\"3\"],\n values[\"4\"],\n values[\"5\"],\n response_volume,\n \"\", # Empty field for the user to enter their own notes\n start_date,\n end_date\n ]\n end\n\n end\n end",
"def make_new_category\n\n puts \"Pick a category name\"\n print \"> \"\n\n category_name = $stdin.gets.chomp\n\n CSV.open('categories.csv', 'a+') do |csv_file|\n csv_file << [category_name]\n end\n\n category = category_name\n link = \"#{category}.csv\"\n\n CSV.open(link, 'wb') do |csv|\n csv << [\"question\", \"answer\"]\n end\n\n new_qa(category_name)\n\nend",
"def csv_writer(rows)\n headers = [\"name\", \"capacity\", \"storage\"]\n CSV.open(\"reservoir_data.csv\", 'w') do |csv|\n csv << headers\n rows.each do |row|\n csv << row\n end\n end\nend",
"def to_csv\n fields = []\n\n fields << '\"' + key.to_s + '\"'\n fields << '\"' + scientific_name + '\"'\n fields << '\"' + (locality || '') + '\"'\n fields << '\"' + (resource ? resource.name : '') + '\"'\n fields << '\"' + (georef? ? 'TRUE':'FALSE') +'\"'\n fields << '\"' + (date_collected ? date_collected.to_s : '') + '\"'\n fields << '\"' + (latitude ? latitude.to_s : '') + '\"'\n fields << '\"' + (longitude ? longitude.to_s : '') + '\"'\n fields << '\"' + (colcode ? colcode.name : '') + '\"'\n fields << '\"' + (instcode ? instcode.name : '') + '\"'\n fields << '\"' + (catalogue_no ? catalogue_no.to_s : '') + '\"'\n\n fields.join(CSV_SEPARATOR)\n end",
"def create_csv_for_GH(csv_data)\n\n csv_string = CSV.open(\"#{$basefile}GH.csv\", \"wb\") do |csv|\n\n csv_data.each do |hash|\n csv << hash\n\n end\n end\n end",
"def create_csv_for_LLR(csv_data)\n\n csv_string = CSV.open(\"#{$basefile}LLR.csv\", \"wb\") do |csv|\n\n csv << csv_data.first.keys\n csv_data.each do |hash|\n csv << hash.values\n end\n end\n end",
"def as_csv\n CSV.generate do |csv|\n csv << BUSINESS_LINE_OPTIONS\n tasks.each do |task|\n csv << [\n business_line.name,\n task.appeal_id,\n task.appeal_type,\n task.appeal.claimant&.name,\n task.appeal.request_issues.size,\n task.appeal.decision_issues.size,\n task.appeal.veteran_file_number,\n task.appeal.intake&.user&.css_id,\n task.type,\n task.id,\n \"https://appeals.cf.ds.va.gov#{business_line.tasks_url}/tasks/#{task.id}\",\n task.assigned_to.try(:name) || task.assigned_to.try(:css_id),\n task.created_at.strftime(\"%Y-%m-%d\"),\n task.closed_at.strftime(\"%Y-%m-%d\")\n ].flatten\n end\n end\n end",
"def table_1\n @table1 = read_table(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"))\n #send_file(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"), :type => 'text/csv', :disposition => 'inline')\n end",
"def format_csv(demographics_data)\n keys = ['geographyName', 'population', 'households', 'incomeBelowPoverty', 'medianIncome']\n csv = demographics_data\n .map { |row| row.fetch_values(*keys) }\n .map { |row| row.join(',')}\n\n keys.join(',') + \"\\n\" + csv.join(\"\\n\")\n end",
"def customer_revenue\n infile, result, *others = params\n\n country_part = \"\"\n country_part << \"#{others[0]}-\" if others[0]\n country_part << \"#{others[1]}-\" if others[1]\n out_file_name = \"#{country_part}customer-revenue-per-year-and-type.csv\"\n\n puts; print \"Creating table from spares and repairs revenue for customers \"+\n \"in #{country_part.chop}\"\n\n rp_order_type = %w{ ZRN ZRK }\n sp_order_type = %w{ ZE ZEI ZO ZOI ZG ZGNT ZRE ZGUP }\n order_type = sp_order_type + rp_order_type\n\n header = \"c19,c20,BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-SP-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-RP-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-SP-O'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-RP-O'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-O'END\"\n\n cols = \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-SP-R':+n10 if #{sp_order_type}.index(c1)END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-RP-R':+n10 if #{rp_order_type}.index(c1)END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-R':+n10 if #{order_type}.index(c1)END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-SP-O':+1 if #{sp_order_type}.index(c1)END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-RP-O':+1 if #{rp_order_type}.index(c1)END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+\"+\n \"'-O':+1 if #{order_type}.index(c1)END\"\n\n sum = \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-SP-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-RP-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-R'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-SP-O'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-RP-O'END,\"+\n \"BEGIN(c0.scan(/\\\\d+\\\\.\\\\d+\\\\.(\\\\d{4})/).flatten[0]||'')+'-O'END\"\n\n Sycsvpro::Table.new(infile: infile,\n outfile: out_file_name,\n header: header,\n key: \"c19,c20\",\n cols: cols,\n nf: \"DE\",\n sum: \"top:#{sum}\",\n sort: \"2\").execute\n\n puts; puts \"You can find the result in #{out_file_name}\"\nend",
"def category_wise_collection_report\n if validate_date\n\n joins = \"LEFT JOIN fee_accounts fa ON fa.id = transport_fee_collections.fee_account_id\"\n @collection = TransportFeeCollection.find_by_id(params[:id], :joins => joins,\n :conditions => \"fa.id IS NULL OR fa.is_deleted = false\")\n if @collection.present?\n filter_by_account, account_id = account_filter\n\n if filter_by_account\n filter_conditions = \"AND finance_transaction_receipt_records.fee_account_id #{account_id == nil ? 'IS' : '='} ?\"\n filter_values = [account_id]\n joins = \"INNER JOIN finance_transaction_receipt_records ON finance_transaction_receipt_records.finance_transaction_id = finance_transactions.id\"\n ft_joins = :finance_transaction_receipt_record\n else\n filter_conditions = joins = \"\"\n ft_joins = \"\"\n filter_values = []\n end\n\n @target_action = \"category_wise_collection_report\"\n @grand_total = @collection.finance_transaction.all(:select => \"amount\",\n :joins => ft_joins, :conditions => [\"(transaction_date BETWEEN ? AND ?) #{filter_conditions}\",\n @start_date, @end_date] + filter_values).map {|x| x.amount.to_f }.sum\n\n @courses = TransportFee.all(\n :joins => \"INNER JOIN transport_fee_finance_transactions ON transport_fees.id = transport_fee_finance_transactions.transport_fee_id\n INNER JOIN finance_transactions ON finance_transactions.id = transport_fee_finance_transactions.finance_transaction_id\n INNER JOIN batches on batches.id = transport_fees.groupable_id #{joins}\",\n :group => \"transport_fees.groupable_id\",\n :conditions => [\"(finance_transactions.transaction_date BETWEEN ? AND ?) AND transport_fees.transport_fee_collection_id = ? AND\n transport_fees.groupable_type = 'Batch' #{filter_conditions}\", @start_date,\n @end_date, params[:id]] + filter_values,\n :select => \"SUM(finance_transactions.amount) AS amount, batches.name AS batch_name, batches.course_id AS course_id,\n transport_fees.groupable_id AS batch_id\").group_by(&:course_id)\n\n @departments = TransportFee.find(:all,\n :joins => \"INNER JOIN transport_fee_finance_transactions ON transport_fees.id = transport_fee_finance_transactions.transport_fee_id\n INNER JOIN finance_transactions ON finance_transactions.id = transport_fee_finance_transactions.finance_transaction_id\n INNER JOIN employee_departments on employee_departments.id=transport_fees.groupable_id #{joins}\",\n :conditions => [\"(finance_transactions.transaction_date BETWEEN ? AND ?) AND\n transport_fees.groupable_type = 'EmployeeDepartment' AND\n transport_fees.transport_fee_collection_id = ? #{filter_conditions}\",\n @start_date, @end_date, params[:id]] + filter_values,\n :group => \"transport_fees.groupable_id\",\n :select => \"employee_departments.name AS dep_name, SUM(finance_transactions.amount) AS amount,\n transport_fees.groupable_id AS dep_id\")\n\n if request.xhr?\n render(:update) do|page|\n page.replace_html \"fee_report_div\", :partial => \"department_wise_transport_collection_report_partial\"\n end\n end\n else\n flash[:notice] = t('flash_msg5')\n if request.xhr?\n render :update do |page|\n page.redirect_to :controller => \"user\", :action => \"dashboard\"\n end\n else\n redirect_to :controller => \"user\", :action => \"dashboard\"\n end\n end\n else\n render_date_error_partial\n end\n end",
"def index\n @search_term = params[:search_term]\n @category = Category.find_by_id(params[:category_id])\n\n if @category.present?\n @bank_transactions = @category.bank_transactions\n elsif @search_term.present?\n @bank_transactions = BankTransaction.search(@search_term)\n else\n @bank_transactions = BankTransaction.all\n end\n\n respond_to do |format|\n format.html\n format.csv { send_data @bank_transactions.to_csv }\n end\n end",
"def outputCSV(reviews, store)\n if reviews.any?\n CSV.open(\"./reviews.csv\", \"wb\") do |csv|\n csv << [\"store\", \"date\", \"version\", \"author\", \"rating\", \"subject\", \"review\"]\n reviews.each_with_index do |review, index|\n csv << [store[:name], review[:date], review[:version], review[:author], review[:rating], review[:subject], review[:body]]\n end\n end\n end\nend",
"def generateCSV data\n CSV.open(\"schools.csv\", \"wb\") do |csv|\n csv << [ \"school_name\", \"school_address\", \"school_type\", \"school_url\", \"school_area\"]\n data.each do |cell|\n csv << [ cell[\"school_name\"], cell[\"school_address\"], cell[\"school_type\"], cell[\"school_url\"], cell[\"school_area\"]]\n end\n end\nend",
"def index\n @pagetitle = \"Suppliers\"\n \n @companies = Company.all\n\n @path = 'suppliers'\n\n @suppliercsv = Supplier.all \n respond_to do |format|\n format.html\n format.csv { send_data @suppliercsv.to_csv }\n \n end\n\n\n end",
"def adquisitions_csv(options = {})\n CSV.generate(options) do |csv|\n csv << ['ID', 'Fecha', 'Cantidad', 'ID Producto', 'ID Provedor']\n all.find_each do |adq|\n csv << [\n adq.id,\n adq.adquisition_date,\n adq.quantity,\n adq.product_id,\n adq.provider_id\n ]\n end\n end\n end",
"def prepare_coversheet_for_csv(csv, _headings, hash)\n csv << [_('Title: '), format(_('%{title}'), title: title)]\n csv << if Array(hash[:attribution]).many?\n [_('Creators: '), format(_('%{authors}'), authors: Array(hash[:attribution]).join(', '))]\n else\n [_('Creator:'), format(_('%{authors}'), authors: hash[:attribution])]\n end\n if hash[:investigation].present?\n csv << [_('Principal Investigator: '),\n format(_('%{investigation}'), investigation: hash[:investigation].map(&:name).join(', '))]\n end\n if hash[:data_curation].present?\n csv << [_('Date Manager: '),\n format(_('%{data_curation}'), data_curation: hash[:data_curation].map(&:name).join(', '))]\n end\n if hash[:pa].present?\n csv << [_('Project Administrator: '), format(_('%{pa}'), pa: hash[:pa].map(&:name).join(', '))]\n end\n if hash[:other].present?\n csv << [_('Contributor: '), format(_('%{other}'), other: hash[:other].map(&:name).join(', '))]\n end\n csv << [_('Affiliation: '), format(_('%{affiliation}'), affiliation: hash[:affiliation])]\n csv << if hash[:funder].present?\n [_('Template: '), format(_('%{funder}'), funder: hash[:funder])]\n else\n [_('Template: '), format(_('%{template}'), template: hash[:template] + hash[:customizer])]\n end\n csv << [_('Grant number: '), format(_('%{grant_number}'), grant_number: grant&.value)] if grant&.value.present?\n if description.present?\n csv << [_('Project abstract: '), format(_('%{description}'), description: Nokogiri::HTML(description).text)]\n end\n csv << [_('Last modified: '), format(_('%{date}'), date: updated_at.localtime.to_date.strftime('%d-%m-%Y'))]\n csv << [_('Copyright information:'),\n _(\"The above plan creator(s) have agreed that others may use as\n much of the text of this plan as they would like in their own plans,\n and customise it as necessary. You do not need to credit the creator(s)\n as the source of the language used, but using any of the plan's text\n does not imply that the creator(s) endorse, or have any relationship to,\n your project or proposal\")]\n csv << []\n csv << []\n end",
"def append_csv(rainfall) # append rainfall details into CSV file\r\n filename = \"rainfall_collection.csv\"\r\n if !File.file?(filename) # places header information\r\n header = [\"Place\",\"year\",\"Month\",\r\n \"day1\",\"day2\",\"day3\",\"day4\",\"day5\",\"day6\",\"day7\",\"day8\",\"day9\",\"day10\",\r\n \"day11\",\"day12\",\"day13\",\"day14\",\"day15\",\"day16\",\"day17\",\"day18\",\"day19\",\"day20\",\r\n \"day21\",\"day22\",\"day23\",\"day24\",\"day25\",\"day26\",\"day27\",\"day28\",\"day29\",\"day30\",\r\n \"day31\"]\r\n CSV.open(filename, \"w+\") do |hdr|\r\n hdr << header\r\n hdr << rainfall\r\n end\r\n else\r\n CSV.open(filename, \"a+\") do |mth|\r\n mth << rainfall\r\n end\r\n end # end for File.file? if file exists\r\n\r\nend",
"def write_to_csv(grocery_list, any_csv_file)\n\tCSV.open(any_csv_file, \"w\") do |csv|\n\t\tcsv << [\"Number\", \"Item\"]\n\t\tgrocery_list.each_index { |index| csv << [index + 1, grocery_list[index]] }\n\tend\nend",
"def to_a11_header_csv(start_date:, end_date:)\n non_flagged_submissions = self.submissions.non_flagged.where(\"created_at >= ?\", start_date).where(\"created_at <= ?\", end_date)\n return nil unless non_flagged_submissions.present?\n\n header_attributes = [\n \"submission comment\",\n \"survey_instrument_reference\",\n \"agency_poc_name\",\n \"agency_poc_email\",\n \"department\",\n \"bureau\",\n \"service\",\n \"transaction_point\",\n \"mode\",\n \"start_date\",\n \"end_date\",\n \"total_volume\",\n \"survey_opp_volume\",\n \"response_count\",\n \"OMB_control_number\",\n \"federal_register_url\"\n ]\n\n CSV.generate(headers: true) do |csv|\n submission = non_flagged_submissions.first\n csv << header_attributes\n csv << [\n submission.form.data_submission_comment,\n submission.form.survey_instrument_reference,\n submission.form.agency_poc_name,\n submission.form.agency_poc_email,\n submission.form.department,\n submission.form.bureau,\n submission.form.service_name,\n submission.form.name,\n submission.form.medium,\n start_date,\n end_date,\n submission.form.anticipated_delivery_count,\n submission.form.survey_form_activations,\n non_flagged_submissions.length,\n submission.form.omb_approval_number,\n submission.form.federal_register_url,\n ]\n end\n end",
"def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend",
"def csv\n @name + \",b,\" + @calories.to_s\n end",
"def to_csv\n # id.to_s << \",\" << last_name << \",\" << first_name << \",\" << email << \",\" << birthdate.to_s\n end",
"def print_csv_results\n FileUtils.mkdir_p(\"data\")\n File.open(\"data/results.csv\", \"w\") do |f|\n @data.sort_by{|id, values| values[\"sum_dose\"]}.each do |id, values|\n f.puts \"%s\\t%d\\t%.4f\" % [id, values[\"ddays\"], dose_unit(values[\"sum_dose\"])]\n end\n end\n end",
"def export_scholarship(specialization)\n beg = Date.parse(TermsCalculator.starting_in(2008)) - 1.month\n fin = beg + 1.month\n sql = 'payed_on > ? and payed_on < ? and index_id = ?'\n indices = Index.find_for_scholarship(User.find_by_login('ticha'),\n :conditions => [\"specialization_id = ?\", specialization],\n :paying_date => fin)\n unless indices.empty?\n (1..13).each do |month|\n filename = '%s_%s.csv' % [specialization.code, fin.strftime('%m_%y')]\n File.open(filename, 'wb') do |outfile|\n CSV::Writer.generate(outfile, ';') do |csv|\n csv << [specialization.name, beg.strftime('%Y-%m-%d'),\n fin.strftime('%Y-%m-%d'), '', '']\n csv << ['name', 'type', 'amount', 'disponent', 'payed_on']\n indices.each do |index|\n if scholarships = Scholarship.find(:all, :conditions =>\n [sql, beg, fin, index.id])\n scholarships.each do |scholarship|\n csv << [scholarship.index.student.display_name,\n scholarship.type.to_s[0, 1], scholarship.amount, scholarship.disponent,\n scholarship.payed_on.strftime('%Y-%m-%d')]\n end\n end\n end\n end\n end\n @@mylog.debug 'Exported' + filename\n system 'iconv -f utf-8 -t cp1250 %s > %s' % [filename, filename.gsub(/\\.csv/, '.win.csv')]\n system 'rm %s' % filename\n beg += 1.month\n fin = beg + 1.month\n end\n end\n end",
"def generate_csv_file(file_path, row_data)\n CSV.open(file_path, \"wb\") do |csv|\n csv << [\"first_name\", \"last_name\", \"dob\", \"member_id\", \"effective_date\", \"expiry_date\", \"phone_number\"]\n row_data.each { |row| csv << row }\n end\nend",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def index\n @datatable = InventoryMovementsDatatable.new\n respond_to do |format|\n format.html\n format.csv do\n day_before_start_date = (@imr_start_date <= @initial_date) ? DateTime.now.end_of_day : (@imr_start_date.to_datetime - 1.day).end_of_day\n ekanek = Organisation::EKANEK_ORG\n a3m = Organisation::A3M_ORG\n params[:organisation_id] = params[:organisation_id].to_i\n\n # \"Brand Name\", \"Brand Owner\", \"Item Name\", \"Item Type\", \"MRP\", \"GWP/Surprise\"\n brand_name_hash, brand_owner_hash, name_hash, item_type_hash, mrp_hash, gwp_surprise_hash = Sku.create_item_detail_hashes\n\n # Opening Stock\n organisation_ids = params[:organisation_id] == 0 ? [ekanek.id, a3m.id] : params[:organisation_id]\n opening_stock_quantity_hash, opening_stock_value_hash = InventoryItem.create_opening_stock_hashes(day_before_start_date, organisation_ids)\n\n organisation_ids = (params[:organisation_id].in? [0, ekanek.id]) ? [ekanek.id, a3m.id] : params[:organisation_id]\n # Total Purchase = 1. Putaway GRN Items\n sku_purchased_quantity_hash, sku_purchased_value_hash = InventoryItem.create_purchased_hashes(@imr_start_date, @imr_end_date, organisation_ids)\n # Bad purchase = 1. Putaway GRN Items Bad\n sku_bad_qty_purchased_hash, sku_bad_value_purchased_hash = InventoryItem.create_bad_purchased_hashes(@imr_start_date, @imr_end_date, organisation_ids)\n\n organisation_id = params[:organisation_id] == 0 ? a3m.id : params[:organisation_id]\n # 2. Putaway Returned by Customers\n sku_quantity_returned_by_customers_hash, sku_value_returned_by_customers_hash = InventoryItem.create_returned_by_customer_hashes(@imr_start_date, @imr_end_date, organisation_id)\n # 2. Putaway Returned by Customers Bad\n sku_qty_bad_stock_returned_by_customer_hash, sku_value_bad_stock_returned_by_customer_hash = InventoryItem.create_bad_returned_by_customer_hashes(@imr_start_date, @imr_end_date, organisation_id)\n # Sold\n sku_quantity_sold_hash, sku_value_sold_hash = InventoryItem.create_sold_hashes(@imr_start_date, @imr_end_date, organisation_id)\n\n # Gatepasses\n organisation_id = params[:organisation_id] == 0 ? 
ekanek.id : params[:organisation_id]\n sku_quantity_returned_to_vendor_hash, sku_value_returned_to_vendor_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'rtv', organisation_id)\n sku_self_consumed_quantity_hash, sku_self_consumed_value_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'self_consumption', organisation_id)\n sku_quantity_sent_for_brand_collabs_hash, sku_value_sent_for_brand_collabs_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'brand_collabs', organisation_id)\n sku_quantity_sent_to_agency_hash, sku_value_sent_to_agency_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'agency', organisation_id)\n sku_gift_quantity_hash, sku_gift_value_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'gift', organisation_id)\n sku_quantity_liquidated_hash, sku_value_liquidated_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'liquidation', organisation_id)\n sku_stock_transfer_quantity_hash, sku_stock_transfer_value_hash = InventoryItem.create_gatepass_reason_hashes(@imr_start_date, @imr_end_date, 'stock_transfer', organisation_id)\n # Bad Gatepasses\n sku_bad_qty_gatepass_hash, sku_bad_value_gatepass_hash = InventoryItem.create_bad_gatepass_hashes(@imr_start_date, @imr_end_date, organisation_id)\n # Blocked Qty\n sku_blocked_qty_hash = Inventory.create_sku_blocked_qty_hash\n\n require 'csv'\n filename = \n case params[:organisation_id].to_i\n when 0\n 'Consolidated-IMR.csv'\n when ekanek.id\n 'EkAnek-IMR.csv'\n when a3m.id\n 'A3M-IMR.csv'\n end\n\n CSV.open(\"#{filename}\", \"wb\") do |response_csv|\n response_csv << [\"SKU ID\", \"Brand Name\", \"Brand Owner\", \"Item Name\", \"Item Type\", \"MRP\", \"GWP/Surprise\", \"Opening Stock Qty\", \"Opening Stock Value\", \"Qty Purchased\", \"Value Purchased\",\n \"Qty Returned By Customer\", \"Value Returned By Customer\", \"Total Putaway Qty\", \"Total Putaway Value\",\n \"Qty Sold\", \"Value Sold\", \"Qty Returned to Vendor\", \"Value Returned to Vendor\", \"Qty Used for Self Consumption\",\n \"Value Used for Self Consumption\", \"Qty used for Brand Collabs\", \"Value used for Brand Collabs\", \"Qty sent to Agency\",\n \"Value sent to Agency\", \"Qty used as Gift\", \"Value used as Gift\", \"Qty Liquidated\", \"Value Liquidated\",\n \"Stock Transfer Qty\", \"Stock Transfer Value\", \"Gatepass Bad Qty\", \"Gatepass Bad Value\", \"Total Gatepass\", \"Total Gatepass Value\", \"Stock Adjustment\",\n \"Closing Stock Qty\", \"Closing Stock Value\" , \"Good Qty\", \"Good Qty Value\", \"Bad Qty\", \"Bad Qty Value\", \"Qty Blocked\"]\n\n skus = InventoryItem.pluck(:sku_id).uniq\n skus.each do |sku_id|\n report = []\n report << sku_id\n sku = Sku.find sku_id\n\n report << brand_name_hash[sku_id]\n report << brand_owner_hash[sku_id]\n report << name_hash[sku_id]\n report << item_type_hash[sku_id]\n report << mrp_hash[sku_id]\n report << gwp_surprise_hash[sku_id]\n\n opening_stock_quantity = (@imr_start_date <= @initial_date) ? 0 : opening_stock_quantity_hash[sku_id].to_i\n opening_stock_value = (@imr_start_date <= @initial_date) ? 
0 : opening_stock_value_hash[sku_id].to_f\n report << opening_stock_quantity\n report << opening_stock_value\n report << sku_purchased_quantity_hash[sku_id].to_i\n report << sku_purchased_value_hash[sku_id].to_f\n report << sku_quantity_returned_by_customers_hash[sku_id].to_i\n report << sku_value_returned_by_customers_hash[sku_id].to_f\n total_putaway_qty = sku_purchased_quantity_hash[sku_id].to_i + sku_quantity_returned_by_customers_hash[sku_id].to_i\n total_putaway_value = sku_purchased_value_hash[sku_id].to_f + sku_value_returned_by_customers_hash[sku_id].to_f\n report << total_putaway_qty\n report << total_putaway_value\n report << sku_quantity_sold_hash[sku_id].to_i\n report << sku_value_sold_hash[sku_id].to_f\n report << sku_quantity_returned_to_vendor_hash[sku_id].to_i\n report << sku_value_returned_to_vendor_hash[sku_id].to_f\n report << sku_self_consumed_quantity_hash[sku_id].to_i\n report << sku_self_consumed_value_hash[sku_id].to_f\n report << sku_quantity_sent_for_brand_collabs_hash[sku_id].to_i\n report << sku_value_sent_for_brand_collabs_hash[sku_id].to_f\n report << sku_quantity_sent_to_agency_hash[sku_id].to_i\n report << sku_value_sent_to_agency_hash[sku_id].to_f\n report << sku_gift_quantity_hash[sku_id].to_i\n report << sku_gift_value_hash[sku_id].to_f\n report << sku_quantity_liquidated_hash[sku_id].to_i\n report << sku_value_liquidated_hash[sku_id].to_f\n report << sku_stock_transfer_quantity_hash[sku_id].to_i\n report << sku_stock_transfer_value_hash[sku_id].to_f\n report << sku_bad_qty_gatepass_hash[sku_id].to_i\n report << sku_bad_value_gatepass_hash[sku_id].to_f\n total_gp_qty = sku_quantity_returned_to_vendor_hash[sku_id].to_i +\n sku_self_consumed_quantity_hash[sku_id].to_i +\n sku_quantity_sent_for_brand_collabs_hash[sku_id].to_i +\n sku_quantity_sent_to_agency_hash[sku_id].to_i +\n sku_gift_quantity_hash[sku_id].to_i +\n sku_quantity_liquidated_hash[sku_id].to_i +\n sku_stock_transfer_quantity_hash[sku_id].to_i +\n sku_bad_qty_gatepass_hash[sku_id].to_i\n total_gp_value = sku_value_returned_to_vendor_hash[sku_id].to_f +\n sku_self_consumed_value_hash[sku_id].to_f +\n sku_value_sent_for_brand_collabs_hash[sku_id].to_f +\n sku_value_sent_to_agency_hash[sku_id].to_f +\n sku_gift_value_hash[sku_id].to_f +\n sku_value_liquidated_hash[sku_id].to_f +\n sku_stock_transfer_value_hash[sku_id].to_f +\n sku_bad_value_gatepass_hash[sku_id].to_f\n report << total_gp_qty\n report << total_gp_value\n report << 0 # stock adjustment 0 for now\n\n inwarded_quantity = total_putaway_qty\n inwarded_value = total_putaway_value\n\n outwarded_quantity = sku_quantity_sold_hash[sku_id].to_i + total_gp_qty\n outwarded_value = sku_value_sold_hash[sku_id].to_f + total_gp_value\n\n closing_stock_quantity = opening_stock_quantity + inwarded_quantity - outwarded_quantity\n closing_stock_value = opening_stock_value + inwarded_value - outwarded_value\n\n # total_bad = putaway_grn_bad + putaway_return_bad - gatepass_bad\n bad_qty = sku_bad_qty_purchased_hash[sku_id].to_i +\n sku_qty_bad_stock_returned_by_customer_hash[sku_id].to_i -\n sku_bad_qty_gatepass_hash[sku_id].to_i\n bad_stock_value = sku_bad_value_purchased_hash[sku_id].to_f +\n sku_value_bad_stock_returned_by_customer_hash[sku_id].to_f -\n sku_bad_value_gatepass_hash[sku_id].to_f\n\n good_qty = closing_stock_quantity - bad_qty\n good_stock_value = closing_stock_value - bad_stock_value\n\n report << closing_stock_quantity\n report << closing_stock_value\n report << good_qty\n report << good_stock_value\n report << bad_qty\n report << 
bad_stock_value\n report << sku_blocked_qty_hash[sku_id].to_i\n response_csv << report\n end\n\n send_file(\n \"#{filename}\",\n filename: \"#{filename}\",\n )\n end\n end\n end\n end",
"def update_meal_to_csv\n CSV.open(@csv_file, \"w\") do |csv|\n csv << [\"id\", \"name\", \"price\"]\n @meals.each do |meal|\n csv << [meal.id, meal.name, meal.price]\n end\n end\n end",
"def write_to_file(course)\n className = \"#{course[\"subject\"]} #{course[\"catalogNbr\"]}\"\n title = course[\"titleLong\"]\n credits = course[\"enrollGroups\"][0][\"unitsMaximum\"]\n CSV.open(\"results.csv\", \"a\") do |csv|\n csv << [className, title, credits]\n end\nend",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end",
"def createdata\n begin\n @t= Entry.find(:all,:conditions=>[\"ShopName in (?) and status=0\",params[:Entries][:ShopName]])\n @t.each do |c|\n c.status=1\n c.save\n end\n date=Time.now().strftime(\"%Y%m%d\")\n @cluster=Shop.find_first([\"ShopName=?\",params[:Entries][:ShopName]])\n fname=\"#{@cluster.ClusterName}_#{params[:Entries][:ShopName]}_\"+date+\".sql\"\n total=0\n count=0\n @t= Entry.find(:all,:conditions=>[\"ShopName in (?) and status=1 \",params[:Entries][:ShopName]])\n FasterCSV.open(fname, \"w\") do |csv|\n for sname in @t\n @a=[sname.ClusterName,sname.ShopName,sname.Date,sname.GroupID,sname.MachineNo,sname.ScreenIN,sname.ScreenOUT,sname.MeterIN,sname.MeterOUT,sname.Machineshort,sname.Shortreason,sname.status]\n csv<< @a\n end\n end\n pw=Dir.pwd()\n Dir.chdir(pw)\n filename=pw.to_s+\"/\"+fname\n render :update do |page|\n page << \"document.getElementById('aux_div').style.visibility = 'visible'\"\n page.redirect_to url_for(:controller=>'entries', :action=>'new')\n end\n rescue Exception=>ex\n puts ex.message\n end\n end",
"def student_all_report\n @user = User.find(params[:id])\n report = StringIO.new\n CSV::Writer.generate(report, ',') do |csv|\n #Get Words\n @words = @user.word_answers.find(:all, :order => \"question_id ASC\")\n csv << %w(question answer date)\n @words.each do |answer|\n csv << [answer.word.value, answer.value, answer.created_at]\n end\n #Get Situations\n @situation_answers = @user.completion_answers.find(:all, :order => \"question_id ASC\")\n @situation_answers.each do |answer|\n csv << [answer.completion.value, answer.value, answer.created_at]\n end\n #Get Scenarios\n @scenario_answers = @user.scenario_answers.find(:all, :order => \"question_id ASC\")\n @scenario_answers.each do |answer|\n csv << [answer.scenario.value, answer.value, answer.created_at]\n end\n \n #Get posts\n @posts = @user.posts.find(:all, :order => \"created_at DESC\")\n @posts.each do |post|\n csv << [post.topic.title, post.body, post.created_at]\n end\n end\n export_as_csv(report,@user.whole_name+\"-all\")\n end",
"def to_csv(created_at, updated_at)\n csv = Array.new\n csv.push id.to_s\n csv.push last_name.gsub(/\\\\/, \"\")\n csv.push first_name ? first_name.gsub(/\\\\/, \"\") : NULL\n csv.push fed\n csv.push title || NULL\n csv.push gender\n csv.push born ? born.to_s : NULL\n csv.push rating ? rating.to_s : NULL\n csv.push NULL # ICU ID\n csv.push created_at\n csv.push updated_at\n csv.join(\",\")\n end",
"def raw_export\n return unless has_permission :can_do_billing\n case request.method\n when :get\n @page_title = 'Generate Raw Data Report'\n @from = Time.now.beginning_of_month\n @to = @from.next_month\n when :post\n from = Date.new(params[:from][:year].to_i,params[:from][:month].to_i)\n to = Date.new(params[:to][:year].to_i,params[:to][:month].to_i)\n @records = BillingCharge.find(:all,:conditions=>['billing_charges.created_at > ? and billing_charges.created_at < ?',from,to],\n :order=>['billing_charges.id'],:include=>[:pilot,:instructor,:aircraft])\n report = StringIO.new\n CSV::Writer.generate(report, ',') do |csv|\n csv << %w(Timestamp Pilot Amount Total Type Note Aircraft Aircraft_Rate Instructor Instructor_Rate Hobbs_Start Hobbs_End Tach Ground_Instruction)\n @records.each do |r|\n csv << [r.created_at, r.pilot.full_name_with_id, r.charge_amount, r.running_total, \n r.type.to_s == 'FlightRecord' ? 'flight/ground instruction' : r.type, r.notes, r.aircraft_id.nil? ? nil : r.aircraft.identifier, \n r.aircraft_rate, r.instructor_id.nil? ? nil : r.instructor.full_name_with_id,\n r.instructor_rate, r.hobbs_start, r.hobbs_end, r.tach_end, r.ground_instruction_time]\n end\n end\n\n report.rewind\n send_data(report.read,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :filename => 'report.csv')\n end\nend",
"def create_csv_file(keys, strings, comments = nil)\n raise \"csv_filename must not be nil\" unless @csv_filename\n CSV.open(@csv_filename, \"wb\") do |csv|\n @headers << \"Comments\" if !comments.nil? && !comments.empty?\n csv << @headers\n keys.each do |key|\n line = [key]\n default_val = strings[@default_lang][key] if strings[@default_lang]\n @filenames.each do |fname|\n lang = fname\n current_val = (lang != default_lang && strings[lang][key] == default_val) ? '' : strings[lang][key]\n line << current_val\n end\n line << comments[key] if comments && comments[key]\n csv << line\n end\n puts \"Done\"\n end\n end",
"def index\n # @articles = Article.includes(:criminal_code)\n @articles = Article.with_current_and_all_prisoner_count\n\n respond_to do |format|\n format.html\n format.json\n format.csv do\n send_data Article.to_csv,\n filename: GeneratedFile.clean_filename('articles', 'csv'),\n type: 'text/csv'\n end\n end\n end",
"def csvexport_all_tables\n @wires = Wire.all.sort_by {|obj| obj.kabeltyp}\n @switchgears_einbau = Switchgear.all.sort_by {|obj| obj.kennung}\n @switchgears = Switchgear.where(:typ => 1).sort_by {|obj| obj.kennung}\n @assemblies = Assembly.all.sort_by {|obj| obj.kennung}\n @electrical_installations = ElectricalInstallation.all.sort_by {|obj| obj.kennung}\n @drives = Drive.all.sort_by {|obj| obj.kennung}\n @devices = Device.all.sort_by {|obj| obj.definition}\n @iogroups = Iogroup.all.sort_by {|obj| obj.name}\n @switchgearcombinations = Switchgearcombination.all.sort_by {|obj| obj.name}\n @offertpositions = Offertposition.where(:subsubproject_id => params[:subsubproject_id]).sort_by {|obj| obj.name}\n @subsubproject = Subsubproject.find(params[:subsubproject_id])\n @subsubprojects = @subsubproject.subproject.subsubprojects.sort_by {|obj| obj.name}\n @subsystems = @subsubproject.subproject.project.subsystems.all.sort_by {|obj| obj.name}\n @units = Unit.where(:subsystem_id => @subsystems.pluck(:id)).sort_by {|obj| obj.name}\n\n CSV.open(\"export_all_tables#{Time.now.strftime(\"%Y-%m-%d-%H-%M\")}.csv\", \"wb\", {:headers => true, :encoding => \"iso-8859-1\", :col_sep => \";\"}) do |csv|\n csv << ['Geraetetypen', '']\n @devices.each do |entry| csv << [entry.id, entry.definition] end\n csv << ['SPS-Modultypen', '']\n @assemblies.each do |entry| csv << [entry.id, entry.kennung] end\n csv << ['Frequenzumrichtertypen', '']\n @drives.each do |entry| csv << [entry.id, entry.kennung] end\n csv << ['Kabeltypen', '']\n @wires.each do |entry| csv << [entry.id, entry.kabeltyp] end\n csv << ['Elektroinstallationstypen', '']\n @electrical_installations.each do |entry| csv << [entry.id, entry.kennung] end\n csv << ['Schaltschranktypen', '']\n @switchgears.each do |entry| csv << [entry.id, entry.kennung] end\n csv << ['Schaltschrankeinbautypen', '']\n @switchgears_einbau.each do |entry| csv << [entry.id, entry.kennung] end\n csv << ['Schaltgeraetekombinationen', '']\n @switchgearcombinations.each do |entry| csv << [entry.id, entry.name] end\n csv << ['IO-Gruppen', '']\n @iogroups.each do |entry| csv << [entry.id, entry.name] end\n csv << ['Offertpositionen', '']\n @offertpositions.each do |entry| csv << [entry.id, entry.name] end\n csv << ['Teilanlagen', '']\n @subsystems.each do |entry| csv << [entry.id, entry.name] end\n csv << ['TeilanlagenUnits', '']\n @units.each do |entry| csv << [entry.id, entry.name] end\n csv << ['Projektversionen', '']\n @subsubprojects.each do |entry| csv << [entry.id, entry.name] end\n end\n\n redirect_to settings_path, :notice => \"Export wurde unter \" + Rails.root.to_s + \"/ abgelegt!\"\n end",
"def save_to_csv(jobs)\n CSV.open('../docs/cryptocurrencyjobslist.csv', 'wb') do |csv|\n csv << [\"Company\", \"Title\", \"Link\", \"Location\", \"Category\"]\n for i in 0..jobs.length-1\n csv << [jobs[i].company, jobs[i].title, jobs[i].listing_url, jobs[i].location, jobs[i].category]\n end\n end\nend",
"def write(file_name, income_taxes)\n CSV.open(\"data/#{file_name}.csv\", 'wb') do |csv|\n csv << [\"client\", \"income_tax\"]\n income_taxes.each do |income_tax|\n csv << [income_tax.first[0], income_tax.first[1]]\n end\n end\n end",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def flavor_report_csv(filename)\n puts 'Creating the Flavors report'\n CSV.open(\"#{filename}\", 'ab') do |csv|\n csv << [\"\\n\"]\n csv << %w(Compute_Template_List)\n csv << %w(Template_Name VCPU Memory Min_VCPU Max_VCPU Min_EC Desired_EC Max_EC Dedicated_CPU Min_Mem Max_Mem Proc_Compatibility_Mode CPU_Pool_Name Shared_CPU_Weight SRR_Capability)\n flavor_report\n csv_array(@flavor_print_array, csv)\n end\n puts 'Done'\n end",
"def getCsvText\r\n sep = \"|\"\r\n t = \"\"\r\n \r\n t << \"Question Number\" + sep \r\n questions.size.times do |i| \r\n t << (i + 1).to_s + sep \r\n end\r\n t << \"Points\"\r\n t << \"\\n\"\r\n \r\n t << \"Correct Answer\" + sep\r\n for question in questions \r\n t << correctAnswerText(question).to_s + sep \r\n end \r\n t << \"\\n\"\r\n \r\n for user in users \r\n t << user.getName + sep\r\n for question in questions \r\n t << answerText(user, question) + sep\r\n end \r\n t << getPoints(user).to_s\r\n t << \"\\n\" \r\n end\r\n return t \r\nend",
"def write_csv filename\n CSV.open(filename, \"w\") do |csv|\n @listOfItem.each_with_index { |item, index| \n if (index == 0)\n csv << [\"Quantity\", \"Name\", \"Total\"]\n end\n # check the product is valid\n if (item.product != nil)\n csv << [item.quantity, item.product.name, item.total]\n else\n csv << [\"nil\", \"nil\", \"nil\"]\n end\n if (index == @listOfItem.size - 1)\n csv << [\"Sales taxs:\", sales_tax]\n csv << [\"Total:\", total]\n end\n }\n end\n end",
"def create_scale_results_csv_file(scale_results_parent_dir)\n CSV.open(\"#{scale_results_parent_dir}/PERF_SCALE_#{@scale_timestamp}.csv\", \"wb\") do |csv|\n headings = [\"agents\",\n \"ok\",\n \"ko\",\n \"combined mean\",\n \"catalog mean\",\n \"filemeta plugins mean\",\n \"filemeta pluginfacts mean\",\n \"locales mean\",\n \"node mean\",\n \"report mean\",\n \"average CPU %\",\n \"average memory\"]\n\n csv << headings\n end\n end",
"def new_qa(category_input_push)\n\n category = category_input_push\n link = \"#{category}.csv\"\n\n puts \"Please input the question\"\n print \"> \"\n question = $stdin.gets.chomp\n puts \"Please input the answer\"\n print \"> \"\n answer = $stdin.gets.chomp\n # skip past the headers by setting the headers to true\n CSV.open(link, 'a+') do |csv_file|\n # add a row to the csv file\n csv_file << [question, answer]\n end\n new_qa_opt\nend",
"def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end",
"def index\n @grants = Grant.all\n @grant_count = @grants.count\n @year_count = @grants.group(:fiscal_year).order(:fiscal_year).count\n @grant_type_count = @grants.group(:grant_type).order(:grant_type).count\n @strategic_priority_count = @grants.group(:strategic_priority).order(:strategic_priority).count\n\t@strategic_results_count = @grants.group(:strategic_results).order(:strategic_results).count\n\t@location_count = @grants.group(:location).order(:location).count\n\t\n\t#fy query\n @fy13 = @grants.where(\"fiscal_year = ?\", 2013)\n @fy14 = @grants.where(\"fiscal_year = ?\", 2014)\n @fy15 = @grants.where(\"fiscal_year = ?\", 2015)\n @fy16 = @grants.where(\"fiscal_year = ?\", 2016)\n \n #by grant type query\n @ahahui = @grants.where(\"grant_type = ?\", \"Ahahui\").count\n @bot = @grants.where(\"grant_type = ?\", \"BOT Initiative\").count\n @community_grant = @grants.where(\"grant_type = ?\", \"Community Grant\").count\n @sponsorship = @grants.where(\"grant_type = ?\", \"Sponsorship\").count\n \n #by strategic priority\n @land = @grants.where(\"strategic_priority = ?\", \"Land & Water\").count\n @edu = @grants.where(\"strategic_priority = ?\", \"Education\").count\n @eco = @grants.where(\"strategic_priority = ?\", \"Economic Self-Sufficiency\").count\n @health = @grants.where(\"strategic_priority = ?\", \"Health\").count\n @gov = @grants.where(\"strategic_priority = ?\", \"Governance\").count\n @culture = @grants.where(\"strategic_priority = ?\", \"Culture\").count\n \n \t#export to all data to excel\n\trespond_to do |f|\n\t\tf.html\n\t\tf.xls\n\t\tf.pdf do\n\t\t\trender :pdf => \"Report Pulled on\" + \" \" + \"#{Time.now.strftime(\"%m/%d/%Y\")}\", :orientation => 'Landscape'\n\t\tend\n end\n \n end",
"def build_csv(file_name)\n\t\t\t\t\t\tarr = [\"Location URN\", \"Email List\"]\n\t\t\t\t\t\t\t\tCSV.open(file_name, \"wb\") do |csv|\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcsv << arr\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tend\nend",
"def create_csvs(csv_count)\n for num in 1..csv_count\n headers = filtered_schema_headers(num)\n CSV.open(\"data#{num}.csv\", \"w\",:write_headers => true, :headers => headers) do |csv|\n populate_csv(csv, headers)\n end\n end\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def build_resource_csv(resource)\n if resource.empty?\n return Array.new(21)\n \n else\n identified_resources=IdentifiedResourceSet.new(resource)\n if @commodity.class == String\n commodities=[@commodity]\n else\n commodities = @commodity\n end\n economic_row = Array.new\n units_row = Array.new\n resource_row = Array.new\n reserves_row = Array.new\n paramarginal_row = Array.new\n submarginal_row = Array.new\n inferred_row = Array.new\n\n commodity_row = Array.new\n \n commodities.each do |c|\n \n if c.in?(identified_resources.commodities)\n commodity_row << c\n units_row = [identified_resources.economic[c][:units][:ore], identified_resources.economic[c][:units][:mineral], identified_resources.economic[c][:units][:grade] ]\n ## FIX ABOVE LINE\n \n \n resource_row = [identified_resources.date[:end], identified_resources.material.to_sentence]\n reserves_row = [identified_resources.reserves[c][:ore], identified_resources.reserves[c][:mineral], identified_resources.reserves[c][:grade] ]\n economic_row = [identified_resources.economic[c][:ore], identified_resources.economic[c][:mineral], identified_resources.economic[c][:grade] ]\n paramarginal_row = [identified_resources.paramarginal[c][:ore] , identified_resources.paramarginal[c][:mineral], identified_resources.paramarginal[c][:grade] ]\n submarginal_row = [identified_resources.submarginal[c][:ore], identified_resources.submarginal[c][:mineral], identified_resources.submarginal[c][:grade] ]\n inferred_row = [identified_resources.inferred[c][:ore], identified_resources.inferred[c][:mineral], identified_resources.inferred[c][:grade] ]\n commodity_row += resource_row + units_row + reserves_row + economic_row + paramarginal_row + submarginal_row + inferred_row\n else\n commodity_row += Array.new(21)\n end\n end\n return commodity_row\n end\n \n end",
"def index\n @companies = Company.where(user_id: current_user.id).order(\"name\")\n @path = 'products'\n @pagetitle = \"Products\"\n\n @company = Company.find_by(user_id: current_user.id)\n @products = @company.get_products2\n @products_market = Product.where(\"products_category_id <> 1\")\n respond_to do |format|\n format.html\n format.xls \n format.csv { send_data @products.to_csv, filename: \"products-#{Date.today}.csv\" }\n \n end\n\n end",
"def index\n #@tenacities = Tenacity.all\n @search = Tenacity.search(params[:q])\n @tenacities = @search.result\n @search.build_condition \n \n respond_to do |format|\n format.html\n format.csv { send_data @tenacities.to_csv }\n format.xls\n end \n end",
"def csv\n # http://railscasts.com/episodes/362-exporting-csv-and-excel?view=asciicast\n respond_to do |format|\n format.html\n format.csv { render text: @ticket.to_csv }\n end\n #render text: @work_order.to_csv\n end",
"def genCsvSummary()\n csv = [] ;\n to_a.each{|analyzer|\n csv.push(analyzer.genCsvRowSummary()) ;\n }\n return csv ;\n end",
"def stg_provider_report_csv(filename)\n puts 'Creating the Storage Providers report'\n CSV.open(\"#{filename}\", 'ab') do |csv|\n csv << [\"\\n\"]\n csv << %w(Storage_Providers_List)\n csv << %w(Storage_Provider_Name Storage_Provider_Type Storage_Provider_Health Storage_Provider_Total_Cap Storage_Provider_Free_Cap)\n stg_provider_report\n csv_array(@stg_provider_csv_array, csv)\n end\n puts 'Done'\n end"
] | [
"0.65237296",
"0.63733584",
"0.61526465",
"0.61163276",
"0.6080415",
"0.604004",
"0.59639037",
"0.5943307",
"0.58680505",
"0.5860356",
"0.58485687",
"0.5837901",
"0.5829537",
"0.58263755",
"0.57953906",
"0.5744318",
"0.5739315",
"0.57381576",
"0.57033294",
"0.57029927",
"0.56965685",
"0.5658008",
"0.56320876",
"0.5627926",
"0.5619779",
"0.5616594",
"0.5616594",
"0.5609828",
"0.56063193",
"0.55853903",
"0.5580932",
"0.5575446",
"0.55729717",
"0.55642974",
"0.5552405",
"0.55394924",
"0.55358696",
"0.55316126",
"0.55310047",
"0.5527156",
"0.5512816",
"0.5506139",
"0.55034095",
"0.54849255",
"0.54801285",
"0.54785633",
"0.54768074",
"0.5469708",
"0.54691696",
"0.5453735",
"0.5436287",
"0.542918",
"0.54236424",
"0.5416067",
"0.541161",
"0.53897583",
"0.5380989",
"0.53809804",
"0.5378794",
"0.53783035",
"0.53774154",
"0.5376867",
"0.53754145",
"0.5371689",
"0.5370129",
"0.5365265",
"0.5358961",
"0.53548205",
"0.535428",
"0.5348503",
"0.53471744",
"0.5345495",
"0.5344025",
"0.53365874",
"0.53271234",
"0.5327034",
"0.5324617",
"0.53231573",
"0.53215486",
"0.5317036",
"0.5308789",
"0.5308165",
"0.5307006",
"0.53069985",
"0.5305904",
"0.5302123",
"0.5295535",
"0.529452",
"0.5290513",
"0.5289828",
"0.5287292",
"0.52824795",
"0.5279499",
"0.52788615",
"0.5272068",
"0.5271979",
"0.52702916",
"0.5268384",
"0.52588546",
"0.5247556"
] | 0.62720853 | 2 |
downloads csv of research data individual charity to users computer | def get_individual
individual = User.find(params[:charity_id])
if Project.exists?(userID: params[:charity_id])
csv_data = Project.particular_csv(individual.id)
respond_to do |format|
format.html
format.csv do
send_data csv_data, filename: "individual_research_expenditure_data#{Time.now.to_s(:db)}.csv"
end
# format.CSV {render csv: @grant_data.to_csv}
# format.xls {render text: @grant_data.to_csv(col_sep: "\t")}
end
else
flash[:notice] = 'No Research Data for ' + individual.charity_name
redirect_to :controller => 'amrc_reports', :action => 'research_individual'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def save_as_csv\n CSV.open(\"./db/#{@name}.csv\", \"wb\") {|csv| @result_scrap.to_a.each {|elem| csv << elem} }\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def do_csv_search(params, download)\n s = do_search(params.merge({:limit => self.count, :offset => 0}))\n \n # any possible 'speed up' would need to be done here:\n results = s.results.map do |obj|\n obj.search_result_format\n end\n\n headers = results.first.keys\n filename = download.filename\n user = download.user\n id = download.id\n path = \"tmp/#{id}_#{user}_#{filename}\"\n \n csv_file = CSV.open(path, \"wb\") do |csv|\n csv << headers\n results.each do |r|\n csv << r.values \n end\n end\n\n Zip::File.open(\"#{path}.zip\", Zip::File::CREATE) do |zipfile|\n zipfile.add(filename, path)\n end\n\n File.delete(path) if File.exist?(path)\n\n download.update({status: 1, filename: \"#{filename}.zip\"})\n #download.created_by.notify(\"Your download '#{download.filename}' is ready.\")\n end",
"def csv_for_company\n filter = params[:filter]\n company_name = Company.find(params[:company_id]).name\n csv_name = filter.present? ?\n \"#{company_name}_operations_filtered_by_#{filter}.csv\" :\n \"#{company_name}_operations.csv\"\n respond_to do |format|\n format.csv { send_data to_csv ,filename: csv_name}\n end\n end",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def get_rex\n if Project.exists?\n @grant_data = Project.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"research_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:success] = \"Research Expenditure Table is Empty\"\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def save_as_csv\n result = get_result_all_city()\n begin\n CSV.open(\"db/city.csv\", \"wb\") do |csv|\n result.each do |city|\n convert = city.to_a\n csv << convert[0]\n end\n end\n rescue => exception\n puts \"Some errors have orcurred!\"\n end\n end",
"def write_to_csv (time, platform, browser_name, browser_version, build, counter, num_cases, delay, duration, rate, test_name)\n googledrive_path=\"Google Drive/CODAP @ Concord/Software Development/QA\"\n localdrive_path=\"Documents/CODAP data/\"\n\n if !File.exist?(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\") || $new_file\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"wb\") do |csv|\n csv<<[\"Time\", \"Platform\", \"Browser\", \"Browser Version\", \"CODAP directory\", \"CODAP Build Num\", \"Test Name\", \"Counter\", \"Num of Cases\", \"Delay (s)\", \"Time Result (ms)\", \"Rate (cases/sec)\"]\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n else\n CSV.open(\"#{Dir.home}/#{$dir_path}/#{$save_filename}\", \"a\") do |csv|\n csv << [time, platform, browser_name, browser_version, build, $buildno, test_name, counter, num_cases, delay, duration, rate]\n end\n end\nend",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def get_unsubmitted\n @grant_data = User.to_csv_unsubmitted\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"unsubmitted_charity_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def save_csv\n @csv_content = params[:csv_content]\n if !@csv_content.nil? && !@csv_content.empty?\n #csv_content_windows1255 = Iconv.conv('windows-1255', 'utf-8', @csv_content)\n #send_data csv_content_windows1255,\n send_data @csv_content,\n :filename => \"#{Rails.cache.read('csv_content_type')}.csv\",\n :type => 'text/csv'\n #:type => 'text/csv; charset=windows-1255'\n else\n render :home\n end\n end",
"def csv(section = 'main', q='google',date='ytd',geo='')\n trend_params = {\"graph\"=>\"all_csv\", \"sa\" => \"N\"}\n trend_params[\"q\"] = q\n trend_params[\"date\"] = date\n if !geo || geo != ''\n trend_params[\"geo\"] = geo\n end\n\n data = @client.get_content(URI.parse(@url_Export), trend_params)\n # empty to return all data\n if section == ''\n return CSV.parse(data)\n end\n # split data into sections\n segments = data.split(\"\\n\\n\\n\")\n if section == 'main'\n section = ['Week', 'Year', 'Day','Month']\n else\n section = [section]\n end\n\n for x in segments do\n if section.include? x.split(',')[0].strip\n maindata = CSV.parse(x)\n return maindata\n end\n end\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def export(organization_name)\n CSV.open(\"/tmp/#{organization_name}.csv\", \"w\") do |csv|\n csv << [\"Name\", \"Upload Date\", \"Steps\", \"Aerobic Steps\", \"Calories\", \"Miles\", \"Device Serial\", \"Input Method\"]\n self.uploads.includes(:person).except(:order).find_each(batch_size: 6000) do |upload|\n if upload.is_device_input == 1\n input_method = \"Synced from Device\"\n else\n input_method = \"Manually Uploaded\"\n end\n csv << [\"#{upload.person.first_name} #{upload.person.last_name}\", upload.date.try(:strftime, \"%b %d %Y\"), upload.total_steps, upload.aerobic_steps, upload.calories, ('%.2f' % upload.distance), upload.device_serial, input_method]\n end\n end\n end",
"def index\n @cautelas = Cautela.search(params[:search], params[:page])\n respond_to do |format|\n format.html\n #format.csv { send_data(@cautelas.to_csv) }\n format.csv {\n filename = \"CEPI_Cautelas-#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\"\n send_data(@cautelas.to_csv, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename)\n }\n end\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def download_csv(csv_string, file_name)\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv;charset=utf-8;header=present'\n# send_data csv_string, :filename => file_name, :disposition => 'attachment', :type => 'text/csv;charset=utf-8;header=present'\n end",
"def save\n CSV.open(\"./db/gossip.csv\", \"ab\") do |csv|\n csv << [@author, @content]\n end\nend",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def get_individual_grant\n individual = User.find(params[:charity_id])\n if GrantsData.exists?(userID: params[:charity_id])\n csv_data = GrantsData.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_grants_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Grant Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'grants_individual'\n end\n end",
"def save_as_csv\n h = get_townhall_urls\n CSV.open('db/emails.csv', 'wb', write_headers: true, headers: ['Mairies du 95', 'Email']) do |csv|\n h.to_a.each { |elem| csv << elem }\n end\n puts \"\\nTon fichier csv est prêt\\n\\n\"\n Index.new.index\n end",
"def download_sample\n if can?(:>, \"4\")\n send_file Rails.public_path+\"/excel/Import/sample_file_part_upload.csv\", :disposition => \"attachment\"\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def save_students\n file = CSV.open(@user_file, \"w\") do |csv|\n @students.each do |student|\n student_data = [student[:name], student[:cohort]]\n csv_line = student_data.join(\",\")\n csv << csv_line\n end\n end\nend",
"def generateCSV data\n CSV.open(\"schools.csv\", \"wb\") do |csv|\n csv << [ \"school_name\", \"school_address\", \"school_type\", \"school_url\", \"school_area\"]\n data.each do |cell|\n csv << [ cell[\"school_name\"], cell[\"school_address\"], cell[\"school_type\"], cell[\"school_url\"], cell[\"school_area\"]]\n end\n end\nend",
"def fetch_csv\n convert_to_csv(fetch)\n end",
"def generate_csv\n\n fields = @resource.typus_fields_for(:csv)\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = Rails.root.join(\"tmp\", \"export-#{@resource.to_resource}-#{Time.now.utc.to_s(:number)}.csv\")\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields.keys\n @resource.find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map do |key, value|\n case value\n when :transversal\n a, b = key.split(\".\")\n record.send(a).send(b)\n when :belongs_to\n record.send(key).to_label\n else\n record.send(key)\n end\n end\n end\n end\n end\n\n send_file filename\n\n end",
"def generate_csv_file(csv_string,assessment_name)\n variable_time = Time.now.strftime(\"%Y%m%d\")\n #The file name is generated based on time.\n filename = assessment_name+variable_time+\".csv\"\n #invoke send_data of fastercsv gem to generate a csv\n send_data(csv_string, :type => \"text/plain\", :filename => filename)\n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def generate_csv_file(file_path, row_data)\n CSV.open(file_path, \"wb\") do |csv|\n csv << [\"first_name\", \"last_name\", \"dob\", \"member_id\", \"effective_date\", \"expiry_date\", \"phone_number\"]\n row_data.each { |row| csv << row }\n end\nend",
"def getData()\n CSV.foreach(\"SkaterProjections.csv\", :headers => true) do |row|\n @skater_data.push(row)\n end\n \n CSV.foreach(\"GoalieProjections.csv\", :headers => true) do |row|\n @goalie_data.push(row)\n end\nend",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def save_to_csv(players)\n CSV.open('../docs/collegeQBstats.csv', 'wb') do |csv|\n csv << [\"NAME\", \"POS\", \"CMP\", \"ATT\", \"CMP%\", \"YDS\", \"AVG\", \"LNG\", \"TD\", \"INT\", \"SACK\", \"RTG\"]\n for i in 0..players.length-1\n csv << [players[i].name, players[i].college, players[i].pos, players[i].cmp, players[i].att, players[i].cmp_pct, \n players[i].yds, players[i].lng, players[i].td, players[i].int, players[i].sack, players[i].rtg]\n end\n end\nend",
"def download_handoff_report\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure_new(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv_new(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report-New.csv'\n\n \tend",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def persist \n require 'csv'\n @csv_string = CSV.generate do |csv|\n csv << Record.attribute_names\n Record.find_each do |record|\n csv << record.attributes.values\n end\n end\n return send_data(@csv_string, :filename => \"downloadTest.csv\") \n end",
"def index\n @search = Claim.order('claims.created_at desc').search(params[:q])\n @claims = @search.result(:distinct => true).paginate(:per_page => 50, :page => params[:page])\n respond_to do |format|\n format.html{}\n format.csv {\n send_data generate_csv, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=claims_list.csv\" \n }\n end\n end",
"def save_students\n File.open(user_filename, \"w\") do |file|\n @students.each do |student|\n student_data = [student[:name], student[:cohort], student[:hobbies]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n end\nend",
"def export_csofeed\n # Create header row #\n header = ['Record Type', 'Device Key', 'IP Addresses', 'MAC Addresses', 'System Name', 'FQDN', 'Status', 'Function', 'Runs MOTS/PRISM Apps', 'MOTS/PRISM IDs', 'Runs Non-MOTS/PRISM Apps', 'Internet Facing', 'Device Criticality', 'Device Owner', 'Operating System', 'Operating System Version', 'Administrator\\'s ATTUID', 'Support Group', 'Serial Number', 'Asset Tag Number', 'Location', 'Location CLLI', 'Comments' \"\\n\"]\n csvdoc = [header.join(',')]\n Node.all.each do |node|\n result = make_csoline(node)\n csvdoc << result.join(',') if result\n end\n fname = \"public/csvexports/csofeed_#{Time.now.strftime(\"%d%m%Y\")}.csv.gz\"\n File.open(fname, 'w') do |f|\n gz = Zlib::GzipWriter.new(f)\n gz.write csvdoc\n gz.close\n end\n end",
"def export(params={})\n columns = delimited_string_to_array(Settings.export.travel_fields)\n send_data Travel.export(columns), :filename => \"travel.csv\"\n end",
"def perform\n get_all_email_of_department_townhalls\n CSV.open(\"thp_free_com/db/db.csv\",\"a+\") {|csv| get_all_email_of_department_townhalls.to_a.each {|elem| csv << elem} }\n puts \"done envoyé vers un csv\"\n end",
"def save_students\n file = File.open(\"students.csv\", \"w\")\n @students.each do |student|\n student_data = [student[:name], student[:cohort], student[:hobby], student[:country]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend",
"def index\n @pagetitle = \"Suppliers\"\n \n @companies = Company.all\n\n @path = 'suppliers'\n\n @suppliercsv = Supplier.all \n respond_to do |format|\n format.html\n format.csv { send_data @suppliercsv.to_csv }\n \n end\n\n\n end",
"def import_csv_full\n \n end",
"def save_vix_future_data(year, month, directory, force_download = false)\n force_download = force_download || year > Today.year || (year == Today.year && month >= Today.month) # we want to re-download files for contracts that haven't expired yet\n \n month_code = MonthToMonthCode[month]\n year_suffix = year.to_s[-2..-1]\n file_name = \"CFE_#{month_code}#{year_suffix}_VX.csv\"\n file_path = File.join(directory, file_name)\n \n if File.exists?(file_path) && !force_download\n puts \"File #{file_path} already exists. Skipping.\"\n else\n url = \"http://cfe.cboe.com/Publish/ScheduledTask/MktData/datahouse/#{file_name}\"\n\n puts \"Downloading #{url}\"\n file_contents = open(url).read()\n File.open(file_path, 'w') { |file| file.write(file_contents) }\n end\n \n file_path\nrescue => e\n puts e.message\nend",
"def welldcsv\n @payment_run = PaymentRun.find(params[:id])\n \n paydates = ''\n paydates = @payment_run.payment_date.to_s(:local) unless @payment_run.payment_date.blank?\n \n send_data(@payment_run.welld_payment_file,:type => 'text/plain', :filename => \"payments-RUN#{@payment_run.id}-#{paydates}.csv\" )\n end",
"def download_csv_file_most_seached_merchants\n if (params[:start_date]&¶ms[:end_date]).blank?\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.all.order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at\n writer << csv_value.map(&:inspect).join(', ')\n writer << \"\\n\"\n end\n end\n send_file(file_name)\n else\n start_date = Date.strptime(params[:start_date], \"%m/%d/%Y\")\n end_date = Date.strptime(params[:end_date], \"%m/%d/%Y\")\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant#{start_date}_to_#{end_date}.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.where(\"DATE(created_at) >= ? AND DATE(created_at) <= ?\", start_date, end_date).order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at, merchant.updated_at\n writer << csv_value\n writer << \"\\n\"\n end\n end\n \n send_file(file_name)\n end\n end",
"def export\n @donor = Donor.order(:created_at)\n respond_to do |format|\n format.html\n format.csv { send_data @donor.as_csv, filename: \"Donors Export #{Date.today}.csv\" }\n end\n end",
"def download_movie_info(title)\n # Fetch movie data from IMBD per the title entered.\n raw_movie_data = Imdb::Search.new(title).movies.first\n\n # Organize the fetched movie data into array\n array_movie_data = []\n array_movie_data << raw_movie_data.title << raw_movie_data.year << raw_movie_data.company << raw_movie_data.genres.join(\", \").to_s << raw_movie_data.length << raw_movie_data.director << raw_movie_data.mpaa_rating << raw_movie_data.tagline << raw_movie_data.poster << raw_movie_data.release_date\n\n # Save the array into 'movies.csv' file as pipe-separated data for later access\n f = File.new('movies.csv', 'a+')\n f.puts(array_movie_data.join(\"|\"))\n f.close\n return array_movie_data\nend",
"def index\n @user_studies = UserStudy.all\n \n respond_to do |format|\n format.html\n format.csv { send_data @user_studies.to_csv }\n end\n end",
"def index\n if params[:csv]\n # generate the header line\n csv_string = 'Campers , Address ,'\n if @option.use_2nd_address?\n\tcsv_string << 'addr2,'\n end\n csv_string << 'City, State, Mail code,'\n if @option.use_country? && Country.active.count > 0\n\tcsv_string << 'Country,'\n end\n case @option.no_phones\n\twhen 1\n\t csv_string << 'Phone, '\n\twhen 2\n\t csv_string << 'Phone, 2nd Phone, '\n end\n csv_string << 'email address, last activity'+\"\\n\"\n # now for the data\n Camper.all.each do |c|\n\tcsv_string << c.full_name + ',' + c.address + ','\n\tcsv_string << (c.address2 ? c.address2 : '') + ',' if @option.use_2nd_address?\n\tcsv_string << c.city + ',' + c.state + ',' + c.mail_code + ','\n\tif @option.use_country? && Country.active.count > 0\n\t if c.country_id?\n\t csv_string << (c.country.name? ? c.country.name : '') + ','\n\t else\n\t csv_string << ','\n\t end\n\tend\n\tcsv_string << (c.phone ? c.phone : '' ) + ',' if @option.no_phones > 0\n\tcsv_string << (c.phone_2 ? c.phone_2 : '' ) + ',' if @option.no_phones > 1\n\tcsv_string << (c.email ? c.email : '' ) + ',' + c.activity.to_s + \"\\n\"\n end\n # debug csv_string\n send_data(csv_string,\n\t\t:type => 'text/csv;charset=iso-8859-1;header=present',\n\t\t:disposition => 'attachment; filename=Campers.csv') if csv_string.length\n else\n @page_title = \"Camper Report\"\n @campers = Camper.all\n end\n end",
"def save_csv\n CSV.open(@csv_file_path, \"wb\") do |csv|\n csv << [\"name\", \"description\", \"rating\", \"prep_time\", \"done\"]\n @recipes.each do |recipe|\n csv << [recipe.name, recipe.description, recipe.rating, recipe.prep_time, recipe.done?]\n end\n end\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def send_sample_file(file_name, arg=[])\n #data = args.join(',').split(',')\n file = CSV.generate do |line|\n arg.each do |element|\n line << element\n end\n end\n\n send_data(file, \n :type => 'text/csv;charset=utf-8;header=present', \n :disposition => \"attachment;filename=#{file_name}_#{Time.now.strftime('%d%m%y-%H%M')}.csv\")\n end",
"def save_csv\n CSV.open(@csv_file_path, 'wb') do |csv|\n @recipes.each do |recipe|\n csv << [recipe.name, recipe.description, recipe.rating, recipe.prep_time, recipe.tried]\n end\n end\n end",
"def save_to_csv(jobs)\n CSV.open('../docs/cryptocurrencyjobslist.csv', 'wb') do |csv|\n csv << [\"Company\", \"Title\", \"Link\", \"Location\", \"Category\"]\n for i in 0..jobs.length-1\n csv << [jobs[i].company, jobs[i].title, jobs[i].listing_url, jobs[i].location, jobs[i].category]\n end\n end\nend",
"def create_csv_for_GH(csv_data)\n\n csv_string = CSV.open(\"#{$basefile}GH.csv\", \"wb\") do |csv|\n\n csv_data.each do |hash|\n csv << hash\n\n end\n end\n end",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def export\n headers = JSON[params[:column_array]]\n rows = JSON[params[:row_array]]\n column_names = Array.new\n headers.each do |col|\n column_names << col\n end\n csv_string = CSV.generate do |csv|\n csv << column_names\n rows.each do |row|\n csv << row\n end\n end\n\n filename = params[:file_name] + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def index\n @backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @backend_tutorial_stats }\n format.csv do\n backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n data = Backend::TutorialStat.download_tutorial_stats_csv(backend_tutorial_stats) \n filename = \"tutorial_stats#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\" \n send_data(data, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename) \n end\n end \n end",
"def generateCSV()\n findCommits\n findLinesOfCode\n\n CSV.open(\"data.csv\", \"wb\") {|csv| @students.to_a.each {|elem| csv << elem} }\nend",
"def save_students\n file = File.open(\"students.csv\",\"w\")\n @students.each do |student|\n student_data = [student[:name], student[:cohort]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend",
"def download(county)\n puts \" F95BA #{county}\"\n results = @client.find :all, {\n class: '1', # 1 Residential\n query: \"(246=|A),(61=|#{county})\", #246 ListingStatus\n #A ActiveAvailable\n #61 County\n select: '157,881,10,922,924,137,261,129,246,80,61,25,1424,102,214,314,96,1,131,1329,sysid', \n search_type: 'Property'\n }\n puts \"F95BA #{results.size} listings\"\n #puts \"F95BA saving\"\n pg_save(results)\n results\n end",
"def save_students\n file = File.open(\"students.csv\", \"w\")\n @students.each do |student|\n student_data = [\n student[:name],\n student[:cohort],\n student[:pronoun],\n student[:height],\n student[:country],\n student[:hobbies]\n ]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend",
"def save_students\n# open the file to save\n file = File.open(\"students.csv\", \"w\")\n# iterate over the array of students\n @students.each do |student|\n student_data = [student[:name], student[:cohort]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend",
"def get_grant_data\n if GrantsData.exists?\n @grant_data = GrantsData.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_grants_data_#{Time.now.to_s(:db)}.csv\"\n end\n end\n else\n flash[:notice] = 'Grant Data Table Empty'\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def save_students\n file = File.open(\"Students.csv\", \"w\")\n @students.each do |student| \n student_data = [student[:name], student[:cohort], student[:hobby], student[:birth_country]]\n csv_line = student_data.join(\", \")\n file.puts csv_line\n end\n file.close\n puts \"Student list saved to 'Students.csv'\\n\".center(100)\nend",
"def update_csv\n CSV.open(@csv_file, \"w\") do |csv|\n csv << [\"id\", \"name\", \"password\", \"title\"]\n @data.each do |record|\n csv << [record.id, record.name, record.password, record.title]\n end\n end\n end",
"def to_csv()\n all = general_info_csv() + \"\\r\\n\" +\n teacher_for_csv() + \"\\r\\n\" +\n contributor_to_csv() + \"\\r\\n\" +\n published_in_csv() + \"\\r\\n\" +\n text_fields_csv()\n all\n end",
"def export_ansokan_csv\n download_csv(AdminOnly::Reports::ShfApplicationsCsvReport.new, t('.success'), t('.error'))\n end",
"def save_students\n file = File.open(\"students.csv\", \"w\")\n @students.each do |student|\n student_data = [student[:name], student[:cohort]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n file.close\nend",
"def fetch_default_benchmark_csv\n cache_path = CSV_TEST_DATA_CACHE_PATH\n\n if File.exist?(cache_path)\n writer.puts \"Cache file found at #{cache_path}.\", verbose: true\n @used_input_path = cache_path\n return File.read(cache_path)\n end\n\n writer.print 'Downloading test data file from GitHub..', verbose: true\n require 'open-uri'\n open(CSV_TEST_DATA_URL).read.tap do |csv| # rubocop:disable Security/Open\n @used_input_path = CSV_TEST_DATA_URL\n writer.puts 'done!', verbose: true\n File.write(cache_path, csv)\n writer.puts \"Wrote cache file to #{cache_path}..\", verbose: true\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n send_data @experiment.to_csv,\n filename: \"#{@experiment.id}-#{@experiment.name}.csv\", \n disposition: 'inline', type: \"multipart/related\"\n }\n end\n end",
"def report_csv_download(report)\n\t\t\tpost= { \"token\" => @token, \"report\" => report, \"xslt\" => 'csv.xsl' }\n\n\t\t\t# Get CSV report location and build post params\n\t\t\tfilename=nessus_http_request('file/xslt', post).scan(/fileName=(.*csv)/).flatten.first\n\t\t\tpost= {\"token\" => @token, 'fileName' => filename, 'step' => 2}\n\n\t\t\t# Allow for generation time\n\t\t\tRex::ThreadSafe.sleep(0.5)\n\n\t\t\t# Get CSV report\n\t\t\tfile=nessus_http_request('file/xslt/download',post)\n\n\t\t\treturn file\n\t\tend",
"def index\n @mailing_list = MailingList.find(params[:mailing_list_id]) \n \n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @customers }\n format.csv { \n csv_file = Customer.to_csv(current_user.id)\n send_data csv_file, :type => 'text/csv', :disposition => 'attachment;\nfilename=output.csv'\n\n \n\n }\n end\n end"
] | [
"0.72661763",
"0.7228881",
"0.6951488",
"0.69504035",
"0.68587995",
"0.68145156",
"0.681413",
"0.67607623",
"0.67035186",
"0.6702409",
"0.670057",
"0.6665862",
"0.6545669",
"0.6513945",
"0.646444",
"0.64043987",
"0.63994145",
"0.63984376",
"0.63079965",
"0.6307336",
"0.6306685",
"0.6281662",
"0.62803525",
"0.6278951",
"0.62626874",
"0.6231351",
"0.62235326",
"0.6214535",
"0.6189361",
"0.61783993",
"0.61586964",
"0.61502",
"0.6147504",
"0.6144812",
"0.6123989",
"0.61218077",
"0.61123943",
"0.6099162",
"0.6077017",
"0.606088",
"0.6060585",
"0.602564",
"0.60222685",
"0.60046613",
"0.6004433",
"0.5996422",
"0.5995035",
"0.5994968",
"0.59905016",
"0.59757656",
"0.5965982",
"0.5958209",
"0.5949542",
"0.5923768",
"0.5923447",
"0.5918188",
"0.5917787",
"0.5917027",
"0.59114933",
"0.5905176",
"0.5904754",
"0.5903978",
"0.58980674",
"0.5897792",
"0.5892787",
"0.5875011",
"0.5873935",
"0.5871768",
"0.5868893",
"0.58667594",
"0.5851451",
"0.58431494",
"0.58430815",
"0.58387744",
"0.5837525",
"0.5836106",
"0.5821664",
"0.5818181",
"0.5811998",
"0.58108455",
"0.58095443",
"0.58042765",
"0.58027357",
"0.579653",
"0.57870257",
"0.5786278",
"0.5784404",
"0.57796705",
"0.5779287",
"0.5778608",
"0.57780105",
"0.5771883",
"0.57710356",
"0.57662004",
"0.57648283",
"0.5760021",
"0.57598585",
"0.57504916",
"0.57476807",
"0.5745685"
] | 0.65077966 | 14 |
downloads csv of grant data for individual to users computer | def get_individual_grant
individual = User.find(params[:charity_id])
if GrantsData.exists?(userID: params[:charity_id])
csv_data = GrantsData.particular_csv(individual.id)
respond_to do |format|
format.html
format.csv do
send_data csv_data, filename: "individual_grants_data#{Time.now.to_s(:db)}.csv"
end
# format.CSV {render csv: @grant_data.to_csv}
# format.xls {render text: @grant_data.to_csv(col_sep: "\t")}
end
else
flash[:notice] = 'No Grant Data for ' + individual.charity_name
redirect_to :controller => 'amrc_reports', :action => 'grants_individual'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def download_csv\n case current_user.role.name\n when 'Edutor Admin'\n usages = Usage\n when 'Institute Admin'\n usages = current_user.usages\n when 'Center Representative'\n usages = current_user.usages\n when 'Teacher'\n class_contents = current_user.class_contents\n section_students = current_user.sections.map{|section| section.students.select('id')}.flatten\n student_group_students = current_user.sections.map{|section| section.student_groups.select('id')}.flatten\n total_students = (section_students + student_group_students).uniq\n usages_ids = class_contents.map(&:uri).map{|uri|\n Usage.where('uri like ?',\"%#{uri}%\").where(:user_id=>total_students).map(&:id)\n }\n usages = Usage.where(:id=>usages_ids)\n end\n filename =\"usages_#{Date.today.strftime('%d%b%y')}\"\n csv_data = FasterCSV.generate do |csv|\n csv << Usage.csv_header\n usages.each do |c|\n csv << c.to_csv\n end\n end\n send_data csv_data, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{filename}.csv\"\n end",
"def get_grant_data\n if GrantsData.exists?\n @grant_data = GrantsData.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_grants_data_#{Time.now.to_s(:db)}.csv\"\n end\n end\n else\n flash[:notice] = 'Grant Data Table Empty'\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def csv_download\n filename = sprintf(\"%s-%s.csv\",\n model_name,\n Time.now.strftime(\"%Y%m%d-%H%M%S\"))\n file = Tempfile.new(filename, Settings.edgarj.csv_dir)\n csv_visitor = EdgarjHelper::CsvVisitor.new(view_context)\n file.write CSV.generate_line(model.columns.map{|c| c.name})\n for rec in user_scoped.where(page_info.record.conditions).\n order(\n page_info.order_by.blank? ?\n nil :\n page_info.order_by + ' ' + page_info.dir) do\n array = []\n for col in model.columns do\n array << csv_visitor.visit_column(rec, col)\n end\n file.write CSV.generate_line(array)\n end\n file.close \n File.chmod(Settings.edgarj.csv_permission, file.path)\n send_file(file.path, {\n type: 'text/csv',\n filename: filename})\n #file.close(true)\n end",
"def download\n if current_user\n filepath = params[:file_path]\n send_file(filepath,filename:filepath.split('/')[-1],type:'application/csv' ,status:202)\n else\n return render 'shared/result',locals:{status:false, error:\"未授權\"}\n end\n end",
"def export\n fn = current_user.name + \"'s blood glucose readings.csv\"\n respond_to do |format|\n format.csv { send_data(BgMeasurement.to_csv(current_user), :filename => fn) }\n end\n end",
"def csv_download\n @stats = Hyrax::WorkUsage.new(params[:id])\n filename = params[:id] + \"_stats.csv\"\n #This is an example that worked\n #send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => 'attachment; filename=payments.csv'\n target = \"attachment`; filename=#{filename}\"\n send_data @stats.to_csv, :type => 'text/csv; charset=utf-8; header=present', :disposition => target\n end",
"def get_unsubmitted\n @grant_data = User.to_csv_unsubmitted\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"unsubmitted_charity_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def generate_csv\n @project = Project.find(params[:id])\n \n content_type = if request.user_agent =~ /windows/i\n ' application/vnd.ms-excel '\n else\n ' text/csv '\n end\n \n project_net = @project.find_all_connections(friend = true, follower = false) \n \n CSV::Writer.generate(output = \"\") do |csv|\n csv << [\"DL n=\" + @project.persons.count.to_s ]\n csv << [\"format = edgelist1\"]\n csv << [\"labels embedded:\"]\n csv << [\"data:\"]\n project_net.each do |entry|\n csv << [entry[0], entry[1], \"1\"]\n end\n @project.persons.each do |person|\n csv << [person.username]\n end\n end\n send_data(output,\n :type => content_type,\n :filename => @project.name.to_s + \"_FF_SNA.csv\")\n end",
"def download_csv\n query = params[:query]\n company_id = params[:company_id]\n file = Operation.create_csv(query, company_id) \n send_file file\n end",
"def csv\n claim_filter = ClaimFilter.new()\n claim_filter.restore_from_session(session,:claim_filter)\n claim_filter.organisation_id = params[:id].to_i\n\n content_type = ( request.user_agent =~ /windows/i ? 'application/vnd.ms-excel' : 'text/csv' )\n content = Claim.csv_by_filter(claim_filter)\n send_data(content,:type => content_type, :filename => 'claims.csv' )\n end",
"def download\n grade_entry_form = record\n send_data grade_entry_form.export_as_csv(current_role),\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def show\n\t\t#@data = CSV.generate do |csv|\n\t\t#\t(1..2000).each_with_index do |user, index|\n\t\t#\t\tcsv << [\"MA_user\" + \"#{index+1}\" + \"@cb.com\", \"12345678\"]\n\t\t#\tend\n\t\t#end\n\t\t#send_data(@data, :type => 'text/csv; charset=utf-8; header=present', :filename => \"MA_user_2000.csv\")\n end",
"def download_grader_students_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n students = Student.all\n\n file_out = CsvHelper::Csv.generate do |csv|\n students.each do |student|\n # csv format is student_name, ta1_name, ta2_name, ... etc\n student_array = [student.user_name]\n grade_entry_student = grade_entry_form.grade_entry_students.find_by_user_id(student.id)\n unless grade_entry_student.nil?\n grade_entry_student.tas.each { |ta| student_array.push(ta.user_name) }\n end\n\n csv << student_array\n end\n end\n\n send_data(file_out, :type => 'text/csv', :disposition => 'inline')\n end",
"def to_csv_dash_data\n @grant_data = dash_data_create\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"all_dashboard_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => @@default_sort\n collection = @@model.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"#{@@param_name}_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << @@name_print.map{|n| n.trn}\n collection.each do |element|\n csv << @@field_print.map{|f| element[f]}\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_competitor_list_ssv\n exporter = Exporters::Competition::Simple.new(@competition)\n csv_string = CSV.generate(col_sep: \";\") do |csv|\n csv << exporter.headers\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def download_provider\n provider_app_detail_ids = ProviderAppDetail.where(fk_audit_trail_id: params[:audit_id]).pluck(:sys_provider_app_detail_id)\n @providers = Provider.where(\"fk_provider_app_detail_id in (?)\", provider_app_detail_ids)\n reg_app = AuditTrail.find(params[:audit_id]).registered_app\n\n respond_to do |format|\n format.html\n format.csv { send_data @providers.to_csv(reg_app, {}), :type => 'text/csv; charset=utf-8; header=present',\n :disposition => \"attachment; filename= #{reg_app.app_name}_Providers_#{DateTime.now.to_s}.csv\" }\n end\n end",
"def download_ta_list\n #find all the users\n tas = Ta.order(:user_name)\n case params[:format]\n when 'csv'\n output = User.generate_csv_list(tas)\n format = 'text/csv'\n when 'xml'\n output = tas.to_xml\n format = 'text/xml'\n else\n # Raise exception?\n output = tas.to_xml\n format = 'text/xml'\n end\n send_data(output, type: format, disposition: 'inline')\n end",
"def download_competitor_list\n exporter = Exporters::Competition::Swiss.new(@competition, nil)\n csv_string = CSV.generate(col_sep: \"\\t\") do |csv|\n csv << exporter.headers if exporter.headers.present?\n exporter.rows.each do |row|\n csv << row\n end\n end\n\n filename = \"competitors_#{@competition.to_s.parameterize}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => 'es_setups.path, es_setups.name'\n setups = EsSetup.find :all, :order => @sort, :conditions => session[:conditions_setup]\n # Creation of the file\n file_name = \"setups_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Chemin\".trn,\"Nom\".trn,\"Valeur\".trn,\"Type\".trn, \"Lecture seule\".trn]\n setups.each do |t|\n csv << [t.path,t.name,t.value,t.type_data,(t.read_only=='Y' ? 'V' : '')]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def personnel_data\n users = User.all.compact.uniq\n data = generate_csv(users)\n render :template => \"shared/csv_data\", :locals => {:data => data}, :layout => false\n end",
"def write_csv\n\t\tCSV.open(\"csv_exports/edits_per_user_#{@dataset}.csv\", 'w') do |csv|\n\t\t\tcsv << ['user', 'edits']\n\t\t\t@users.each do |user, edits|\n\t\t\t\tcsv << [user,edits]\n\t\t\tend\n\t\tend\n\tend",
"def grader_mapping\n grade_entry_form = GradeEntryForm.find(params[:grade_entry_form_id])\n\n students = Student.left_outer_joins(:user, grade_entry_students: [tas: :user])\n .where('grade_entry_students.assessment_id': grade_entry_form.id)\n .order('users.user_name', 'users_roles.user_name')\n .pluck('users.user_name', 'users_roles.user_name')\n .group_by { |x| x[0] }\n .to_a\n\n file_out = MarkusCsv.generate(students) do |student, graders|\n [student] + graders.pluck(1)\n end\n\n send_data file_out,\n type: 'text/csv',\n disposition: 'attachment',\n filename: \"#{grade_entry_form.short_identifier}_grader_mapping.csv\"\n end",
"def csv_data\n case\n when google_key || url then Curl::Easy.perform(uri).body_str\n when file then File.open(uri).read\n end\n end",
"def index\n @mobiles = Customer.pluck(:mobile)\n @agents = User.where(\"role = ?\", 2).pluck(:fname, :id)\n @franchises = User.where(\"role = ?\", 1).pluck(:fname, :id)\n @customers_csv = (policy_scope Customer).order(\"id desc\")\n @customers = (policy_scope Customer).order(\"id desc\").page params[:page]\n authorize Customer.new, :index?\n \n respond_to do |format|\n format.html\n format.csv { send_data @customers_csv.as_csv }\n end\n\n\n end",
"def export_files\n begin\n file_to_download = \"sample_non_compliance_question.csv\"\n send_file Rails.public_path + file_to_download, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=#{file_to_download}\", :stream => true, :buffer_size => 4096\n rescue\n flash[:error] = MESSAGES[\"csv_export\"][\"error\"]\n redirect_to new_audit_path\n end\n end",
"def show\n respond_to do |format|\n format.html\n format.csv {\n @data = @dataset.data\n send_data @data, \n :type => 'text/csv; charset=iso-8859-1; header=present', :stream => true,\n :disposition => \"attachment; filename=#{@dataset.user.student_number}_#{Date.today.strftime('%Y%m%d')}.csv\"\n }\n end\n end",
"def download\n grade_entry_form = GradeEntryForm.find(params[:id])\n send_data grade_entry_form.export_as_csv,\n disposition: 'attachment',\n type: 'text/csv',\n filename: \"#{grade_entry_form.short_identifier}_grades_report.csv\"\n end",
"def csv_export\n if can?(:>, \"4\")\n directory= APP_CONFIG[\"csv_export_path\"]\n @record = KitBomBulkOperation.find_by_id(params[:id])\n export_path=\"Response_#{@record.id}_cup_count_#{@record.file_path.gsub(\".csv\",\"\")}.csv\"\n if File.exist?(File.join(directory,export_path))\n send_file File.join(directory,export_path), :disposition => \"attachment\"\n else\n flash[:error] = \"Something went Wrong Response File Not Found/Try Uploading a New File.\"\n redirect_to upload_parts_path\n end\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def export_csv(csv_data)\n\t\tFile.write(\"kfit_partners.csv\", csv_data.map(&:to_csv).join)\n\tend",
"def export_students_to_csv\n CSV.generate(headers: true) do |csv|\n csv << %w[perm email first_name last_name github_username]\n\n roster_students.each do |user|\n csv << [\n user.perm,\n user.email,\n user.first_name,\n user.last_name,\n user.username\n ]\n end\n end\n end",
"def get_rex\n if Project.exists?\n @grant_data = Project.all_csv\n respond_to do |format|\n format.html\n format.csv do\n send_data @grant_data, filename: \"research_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:success] = \"Research Expenditure Table is Empty\"\n redirect_to :controller => 'amrc_reports', :action => 'reports'\n end\n end",
"def csv\n @records = Reply.all\n csv_string = FasterCSV.generate do |csv|\n csv << %w{Code Name Email Engagement Engagement_Adults Engagement_Children Wedding Wedding_Adults Wedding_Children Camping Diet Notes Updated}\n @records.each do |line|\n csv << [\n line['code'],\n line['name'], \n line['email'],\n line['engagement'],\n line['engagement_adults'],\n line['engagement_children'],\n line['wedding'],\n line['wedding_adults'],\n line['wedding_children'],\n line['camping'],\n line['diet'],\n line['notes'],\n line['updated_at']\n ]\n end\n end\n filename = \"rsvp_download\" + Time.now.strftime(\"%d%m%y-%H%M\").to_s + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def export\n @transactions = Transaction.find_all_by_user_id(current_user.id)\n csv = \"\"\n i = 0\n @transactions.each do |trans|\n if (i==0)\n csv += trans.to_csv(true)\n else\n csv += trans.to_csv(false)\n end\n i += 1\n end\n\n respond_to do |format|\n format.csv { send_data csv }\n end\n end",
"def download_sample\n if can?(:>, \"4\")\n send_file Rails.public_path+\"/excel/Import/sample_file_part_upload.csv\", :disposition => \"attachment\"\n else\n redirect_to main_app.unauthorized_url\n end\n end",
"def export_csv\n # Find all Setup with the stored restrictions\n sorting :default => \"name\"\n collection = PostitTask.find :all, :order => @sort, :conditions => session[:conditions]\n # Creation of the file\n file_name = \"postit_task_export_\" + current_user.id.to_s + \".csv\"\n \n csv_string = CSV.generate({:col_sep => ';', :encoding => \"ISO-8859-15\" }) do |csv|\n csv << [\"Nom\".trn,\"Description\".trn,\"Séquence\".trn]\n collection.each do |element|\n csv << [element.name,element.description,element.sequence]\n end\n end\n send_data Iconv.conv('iso-8859-1//IGNORE', 'utf-8', csv_string), :filename => file_name, :disposition => 'attachment', :type => 'text/csv; charset=iso-8859-1; header=present'\n end",
"def download_heat_tsv\n heat = params[:heat_number]\n exporter = Exporters::Competition::Swiss.new(@competition, heat)\n csv_string = TsvGenerator.new(exporter).generate\n\n filename = \"#{@competition.to_s.parameterize}_heat_#{heat}.txt\"\n send_data(csv_string,\n type: 'text/csv; charset=utf-8; header=present',\n filename: filename)\n end",
"def get_individual\n individual = User.find(params[:charity_id])\n if Project.exists?(userID: params[:charity_id])\n csv_data = Project.particular_csv(individual.id)\n respond_to do |format|\n format.html\n format.csv do\n send_data csv_data, filename: \"individual_research_expenditure_data#{Time.now.to_s(:db)}.csv\"\n end\n # format.CSV {render csv: @grant_data.to_csv}\n # format.xls {render text: @grant_data.to_csv(col_sep: \"\\t\")}\n end\n else\n flash[:notice] = 'No Research Data for ' + individual.charity_name\n redirect_to :controller => 'amrc_reports', :action => 'research_individual'\n end\n end",
"def index\n @experiments = policy_scope Experiment.where(user_id: current_user.id).order(created_at: :desc).page(params[:page])\n respond_to do |format|\n format.html\n format.csv {\n send_data( @experiments.to_csv,\n filename: \"CO2_by_experiment_#{Time.zone.now}.csv\", \n disposition: 'inline', type: \"multipart/related\")\n }\n end\n end",
"def genupload data\n CSV.open(\"update-data.csv\", \"wb\") do |csv|\n csv << @uhead\n data.each { |a| csv << a }\n end\nend",
"def hv_export_users(oHarvest, iDbg = 0)\n\n\tusers = oHarvest.users.all\n\n summary = Array.new()\n\n\tusers.each do |u|\n if (u.is_active == true && u.is_admin == false)\n p_user = Array.new(5)\n p_user[0] = u.first_name\n p_user[1] = u.id\n p_user[2] = u.email\n p_user[3] = u.department\n p_user[4] = u.last_name\n \n summary.push(p_user)\n end\n end\n\n summary = summary.sort { |x, y|\n x[0] <=> y[0]\n }\n\n file = get_config(\"COMMON\",\t\"CSVPath\") + get_config(\"Harvest\", \"MUsers\")\n flush_to_csv(summary, file, true)\n\nend",
"def csv_setup\n # Set filename\n filename = \"ministry_partners.csv\" \n\n #this is required if you want this to work with IE \n if request.env['HTTP_USER_AGENT'] =~ /msie/i\n headers['Pragma'] = 'public'\n headers[\"Content-type\"] = \"text/plain\" \n headers['Cache-Control'] = 'no-cache, must-revalidate, post-check=0, pre-check=0'\n headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Expires'] = \"0\" \n else\n headers[\"Content-Type\"] ||= 'text/csv'\n headers[\"Content-Disposition\"] = \"attachment; filename=\\\"#{filename}\\\"\" \n headers['Cache-Control'] = ''\n end\n end",
"def export_users(file)\n users = User.all\n\n csv_string = FasterCSV.generate do |csv|\n users.each do |user|\n csv << [user.email, user.first_name, user.last_name]\n end\n end\n\n send_data csv_string,\n :type => 'text/csv; charset=iso-8859-1; header=present',\n :disposition => \"attachment; filename=users.csv\"\n File.open(file,\"w\").write\n end",
"def download\n rows = [ ['TVs:'],\n %w[id make model] ] +\n @tvs.all.collect { |tv| [tv.id, tv.make, tv.model] } +\n [ [],\n ['Remotes:'],\n %w[id name description] ] +\n @remotes.all.collect { |remote| [remote.id, remote.name, remote.description] }\n send_csv(rows.collect(&:to_csv).join, 'TVs_and_remotes')\n end",
"def export\n #@contacts = Contact.all\n send_data Contact.to_csv(@contacts),\n :filename => 'addressbook.csv',\n :type => 'text/csv; charset=utf-8',\n :disposition => 'attachment'\n end",
"def csv\n send_data(Map.to_csv, {:filename => \"maps.csv\" })\n end",
"def export_csv\n\n group_array = []\n @page = 1\n @per_page = 50\n\n groups = @context.get(:groups, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n group_array << groups\n group_array, group_hash = check_paging(groups, group_array, \"groups\", @context, true)\n\n group_array.each_with_index do |group, index|\n is_new = index == 0 ? true : false\n membership_array = []\n @page = 1\n\n group_model = Group.find(group['id'], :params => { :access_token => ENV[\"API_TOKEN\"] })\n memberships = group_model.get(:memberships, :page => @page, :per_page => @per_page, :access_token => ENV[\"API_TOKEN\"])\n membership_array << memberships\n membership_array, @membership_hash = check_paging(memberships, membership_array, \"memberships\", group_model, is_new)\n end\n\n export_data = [group_array, @membership_hash]\n perform_export(export_data) \n\n respond_to do |format|\n format.html { render :inline => \"<a href=<%= @download_url %>>Download CSV</a>\" }\n format.json { render :json => @download_url.to_json }\n end\n end",
"def export\n send_data current_user.records.select(:json).order('updated_at desc').collect(&:json).to_json, filename: 'records.json'\n end",
"def admin_index\n authorize User\n\n respond_to do |format|\n format.html do\n @clicked_through = params[:click_through].present?\n @filter_admin = false\n\n @users = if current_user.can_super_admin?\n User.includes(:department, :org, :perms, :roles, :identifiers).page(1)\n else\n current_user.org.users\n .includes(:department, :org, :perms, :roles, :identifiers)\n .page(1)\n end\n end\n\n format.csv do\n send_data User.to_csv(current_user.org.users.order(:surname)),\n filename: \"users-accounts-#{Date.today}.csv\"\n end\n end\n end",
"def fee_reciepts_export_csv\n parameters={:search => params[:search] ,:filename => filename}\n csv_export('finance_transaction', 'fee_reciepts_export', parameters) \n end",
"def get_harvest_users(harvest_webapp, dbg = 0)\n\n member_page = \"/team\"\n pt = get_config(\"COMMON\", \"CSVPath\")\n um = get_config(\"Harvest\", \"MUsers\")\n\n site = harvest_webapp\n\n begin\n \n # move to User list\n site.Go(member_page)\n \n # save user list to csv\n data = site.RetrieveList(\"li.manage-list-item\", method(:proc_split_list_to_array))\n\n p data if dbg\n \n # append email and dept_code\n data.each do |member|\n if (member[0] != \"Admin\")\n site.Go(\"/people/\" + member[1] + \"/edit#profile_base\")\n member.push(site.GetItem(\"#user_email\").attribute(\"value\").value.strip);\n member.push(site.GetItem(\"#user_department\").attribute(\"value\").value.strip);\n else\n member.push(\"\");\n member.push(\"\");\n end\n end\n \n # sort by dept_code\n sorted = data.sort { |a, b|\n a[3] <=> b[3]\n }\n \n # flush to file\n flush_to_csv(sorted, pt + um)\n \n p (pt + um) if dbg\n \n rescue => e\n\n p e\n p e.backtrace\n \n end\n \nend",
"def table_1\n @table1 = read_table(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"))\n #send_file(File.join(FILE_PATH, \"Lgr_prelim_FPKM.txt\"), :type => 'text/csv', :disposition => 'inline')\n end",
"def export\n @fans = Fan.active.find(:all)\n\n csv_string = FasterCSV.generate do |csv|\n csv << [\"Name\", \"E-mail\", \"Notes\", \"Section 1 - Description\", \"Section 1 - Email\", \"Section 1 - IM\", \"Section 1 - Phone\", \"Section 1 - Mobile\", \"Section 1 - Pager\", \"Section 1 - Fax\", \"Section 1 - Company\", \"Section 1 - Title\", \"Section 1 - Other\", \"Section 1 - Address\"]\n\n for fan in @fans\n csv << [fan.name,\n fan.email,\n \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\",\n fan.address \n ]\n end\n end\n\n # filename = @list.name.downcase.gsub(/[^0-9a-z]/, \"_\") + \".csv\"\n filename = \"fans.csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def export\n result = Urlmaster.all\n head = 'EF BB BF'.split(' ').map{|a|a.hex.chr}.join()\n exportFile = CSV.generate(csv = head) do |writer|\n writer << [\"mapId\", \"venueName\", \"floor\", \"typeMap\", \"venueFloorMapImageUrl\", \"venueFloorMapUrl\"]\n result.each do |r|\n writer << [r[:mapId], r[:venueName], r[:floor], r[:typeMap], r[:venueFloorMapImageUrl], r[:venueFloorMapUrl]]\n end\n end\n send_data exportFile, filename: \"MapCrawler-#{Time.now.in_time_zone(\"Asia/Tokyo\").strftime(\"%y%m%d%H%M%S\")}.csv\", type: \"text/csv\"\n # redirect_to crawler_path\n end",
"def get_csv\n CSV.generate do |csv|\n csv << ['Name','Student ID','User ID','Role','Email Address','Sections']\n get_feed[:students].each do |student|\n name = student[:last_name] + ', ' + student[:first_name]\n user_id = student[:login_id]\n student_id = student[:student_id]\n email_address = student[:email]\n role = ENROLL_STATUS_TO_CSV_ROLE[student[:enroll_status]]\n sections = sections_to_name_string(student[:sections])\n csv << [name, student_id, user_id, role, email_address, sections]\n end\n end\n end",
"def generate_csv_file(file_path, row_data)\n CSV.open(file_path, \"wb\") do |csv|\n csv << [\"first_name\", \"last_name\", \"dob\", \"member_id\", \"effective_date\", \"expiry_date\", \"phone_number\"]\n row_data.each { |row| csv << row }\n end\nend",
"def download\r\n\r\n @restricted_fields = []\r\n @no_menubar\t= true\r\n @no_links\t= true\r\n @no_filterbar\t= true\r\n @no_row_links\t= true\r\n pagin_opts\t\t = {:include => [:customers,:user_accesses,:profiles,:dev_feedbacks,:file_imports]}\r\n pagin_opts[:order]\t = @default_order if @default_order\r\n pagin_opts[:order]\t||= \"users.#{params[:sort_by]}\" if !params[:sort_by].blank?\r\n pagin_opts[:conditions] ||= @default_filter\r\n pagin_opts[:joins]\t||= @joins_fields || []\r\n\r\n # pagin_opts[:select]\t||= \"`users`.`login`,`users`.`first_name`,`users`.`last_name`,`users`.`email`,`users`.`telephone`,`users`.`language`,`users`.`active`,`users`.`salt`\" unless params[:format] == 'html'\r\n\r\n if params[:id] && params[:id].to_i > 0\r\n @user = User.find_by_id(params[:id], pagin_opts)\r\n if !@user\r\n flash[:warning] = _(\"Error: %{obj} not found!\") % {:obj => _(%q[User])}\r\n begin\r\n redirect_to :back\r\n rescue\r\n redirect_to :action => :list\r\n end\r\n return\r\n end\r\n f_name = @user.disp_name\r\n respond_to do |format|\r\n format.html {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/show.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"#{f_name}.html\", :disposition => 'attachment', :type => 'text/html;charset=utf-8') }\r\n format.doc {\r\n @no_menubar = true\r\n @no_links = true\r\n data = render_to_string(:template => '/users/show.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n send_data(data, :filename => \"#{f_name}.doc\", :disposition => 'attachment', :type => 'application/msword;charset=utf-8') }\r\n format.pdf {\r\n @pdf = true\r\n @debug_pdf = params[:debug_pdf]\r\n params[:format] = 'html'\r\n html = render_to_string(:template => '/users/show.html.erb', :format => :html, :id => @user.id, :layout => 'pdf')\r\n html.gsub!(/\\/images\\//, Rails.root.join('public', 'images/')) if !params[:debug_pdf]\r\n render(:text => html, :layout => 'pdf') and return if params[:debug_pdf]\r\n kit = PDFKit.new(html, :encoding => 'UTF-8')\r\n kit.stylesheets << Rails.root.join('public', 'stylesheets', 'pdf.css')\r\n pdf = kit.to_pdf\r\n send_data(pdf, :filename => \"#{@user.disp_name}.pdf\") unless params[:debug_pdf] || pdf.blank?\r\n\r\n # send_data(render_to_string(:format => :html, :layout => false, :action => :show, :id => @user.id), :filename => \"#{f_name}.pdf\", :disposition => 'attachment', :type => 'application/pdf;charset=utf-8')\r\n\r\n return\r\n }\r\n format.xml {\r\n send_data(@user.to_xml, :filename => \"#{f_name}.xml\", :disposition => 'attachment', :type => 'text/xml;charset=utf-8')}\r\n format.json {\r\n send_data(@user.to_json, :filename => \"#{f_name}.json\", :disposition => 'attachment', :type => 'text/json;charset=utf-8')}\r\n format.xls {\r\n book = Spreadsheet::Workbook.new\r\n sheet = book.create_worksheet(:name => \"users\")\r\n sheet.row(0).concat([\"Login\", \"First Name\", \"Last Name\", \"Email\", \"Telephone\", \"Language\", \"Active\", \"Salt\", \"User Accesses\", \"Profiles\"])\r\n sheet.row(1).replace([@user.login, @user.first_name, @user.last_name, @user.email, @user.telephone, @user.language, @user.active, @user.salt, @user.user_accesses.map(&:disp_name).to_sentence, @user.profiles.map(&:disp_name).to_sentence])\r\n fname = \"users.xls\"\r\n tmp = Tempfile.new(fname)\r\n book.write(tmp.path)\r\n tmp.close\r\n send_file(tmp.path, 
          :filename => fname)\r\n        }\r\n        format.csv { row = @user\r\n          @csv_string = FasterCSV.generate({:encoding => 'UTF-8', :col_sep => (SystemSetting['csv_export_separator'] || ';')}) do |csv|\r\n            cols = []\r\n            User::FIELDS.each{|k,v| cols << k if [:string, :text, :integer, :float, :date, :time, :datetime, :timestamp, :ho_assoc, :bt_assoc].include? v}\r\n            cols.reject!{|c| [:customers, :accesses, :dev_feedbacks, :file_imports].include?(c) }\r\n            csv << cols.map{|c|\r\n              if [:ho_assoc, :bt_assoc].include?(User::FIELDS[c])\r\n                v = row.send(c) ; v ? v.disp_name : nil\r\n              else\r\n                row.send(c)\r\n              end\r\n            }\r\n          end\r\n          @export_encoding ||= SystemSetting['csv_export_encoding'] || 'UTF-16LE'\r\n          conv = Iconv.new(@export_encoding, 'UTF-8')\r\n          send_data(conv.iconv(@csv_string), :filename => \"#{f_name}.csv\", :disposition => 'attachment', :type => \"text/csv;charset=#{@export_encoding.downcase}\")\r\n          return\r\n        }\r\n      end\r\n    else\r\n      pagin_opts[:page] = 1\r\n      pagin_opts[:per_page] = User.count+1\r\n      @users = User.paginate(pagin_opts)\r\n      respond_to do |format|\r\n        format.html {\r\n          @no_menubar = true\r\n          @no_links = true\r\n          data = render_to_string(:template => '/users/list.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n          send_data(data, :filename => \"users.html\", :disposition => 'attachment', :type => 'text/html;charset=utf-8') }\r\n        format.doc {\r\n          @no_menubar = true\r\n          @no_links = true\r\n          data = render_to_string(:template => '/users/list.html.erb', :layout => 'minimal').gsub(/\\ssrc=\\\"\\//, %Q[ src=\"#{request.protocol}#{request.host_with_port}/])\r\n          send_data(data, :filename => \"users.doc\", :disposition => 'attachment', :type => 'application/msword;charset=utf-8') }\r\n        format.pdf {\r\n          @pdf = true\r\n          @debug_pdf = params[:debug_pdf]\r\n          params[:format] = 'html'\r\n          html = render_to_string(:template => '/users/list.html.erb', :layout => 'pdf')\r\n          html.gsub!(/\\/images\\//, Rails.root.join('public', 'images/')) if !params[:debug_pdf]\r\n          render(:text => html, :layout => 'pdf') and return if params[:debug_pdf]\r\n          kit = PDFKit.new(html, :encoding => 'UTF-8')\r\n          kit.stylesheets << Rails.root.join('public', 'stylesheets', 'pdf.css')\r\n          pdf = kit.to_pdf\r\n          send_data(pdf, :filename => \"users.pdf\") unless params[:debug_pdf] || pdf.blank?\r\n\r\n          # send_data(render_to_string(:layout => false, :action => :list), :filename => \"users.pdf\", :disposition => 'attachment', :type => 'application/pdf;charset=utf-8')\r\n\r\n        }\r\n        format.xml {\r\n          send_data(@users.to_xml, :filename => ('users.xml'), :disposition => 'attachment', :type => 'text/xml;charset=utf-8')}\r\n        format.xls {\r\n          book = Spreadsheet::Workbook.new\r\n          sheet = book.create_worksheet(:name => \"users\")\r\n          sheet.row(0).concat([\"Login\", \"First Name\", \"Last Name\", \"Email\", \"Telephone\", \"Language\", \"Active\", \"Salt\", \"User Accesses\", \"Profiles\"])\r\n          @users.each_with_index do |row,i|\r\n            sheet.row(i+1).replace([row.login, row.first_name, row.last_name, row.email, row.telephone, row.language, row.active, row.salt, row.user_accesses.map(&:disp_name).to_sentence, row.profiles.map(&:disp_name).to_sentence])\r\n          end\r\n          fname = \"users.xls\"\r\n          tmp = Tempfile.new(fname)\r\n          book.write(tmp.path)\r\n          tmp.close\r\n          send_file(tmp.path, :filename => fname)\r\n        }\r\n        format.csv {\r\n          @csv_string = FasterCSV.generate({:encoding => 'UTF-8', :col_sep => (SystemSetting['csv_export_separator'] || ';')}) do |csv|\r\n            cols = []\r\n            User::FIELDS.each{|k,v| cols << k if [:string, :text, :integer, :float, :date, :time, :datetime, :timestamp, :ho_assoc, :bt_assoc].include? v}\r\n            cols.reject!{|c| [:customers, :accesses, :dev_feedbacks, :file_imports].include?(c) }\r\n            csv << cols.map{|c| _(c.titleize)}\r\n            @users.map{|row|\r\n              csv << cols.map {|c|\r\n                if [:ho_assoc, :bt_assoc].include?(User::FIELDS[c])\r\n                  v = row.send(c) ; v ? v.disp_name : nil\r\n                else\r\n                  row.send(c)\r\n                end\r\n              }\r\n            }\r\n          end\r\n          @export_encoding ||= SystemSetting['csv_export_encoding'] || 'UTF-16LE'\r\n          conv = Iconv.new(@export_encoding, 'UTF-8')\r\n          send_data(conv.iconv(@csv_string), :filename => \"users.csv\", :disposition => 'attachment', :type => \"text/csv;charset=#{@export_encoding.downcase}\")\r\n        }\r\n      end\r\n    end\r\n  end",
"def save_students\n file = CSV.open(@user_file, \"w\") do |csv|\n @students.each do |student|\n student_data = [student[:name], student[:cohort]]\n csv_line = student_data.join(\",\")\n csv << csv_line\n end\n end\nend",
"def main()\n request_url = \"#{$canvas_url}/api/v1/users/#{$canvas_user_id}/page_views?per_page=100&start_time=#{$start_time}&end_time=#{$end_time}\"\n method = \"get\"\n options = {}\n data = canvasApiRequest(method,request_url,options)\n compiledHash = []\n data.each do |hash|\n hashData = flattenHash(hash)\n compiledHash.push(hashData)\n end\n outputToCSV(compiledHash)\nend",
"def send_sample_file(file_name, arg=[])\n #data = args.join(',').split(',')\n file = CSV.generate do |line|\n arg.each do |element|\n line << element\n end\n end\n\n send_data(file, \n :type => 'text/csv;charset=utf-8;header=present', \n :disposition => \"attachment;filename=#{file_name}_#{Time.now.strftime('%d%m%y-%H%M')}.csv\")\n end",
"def index\n authorize! :index, Walkathon::Pledge.new\n @walkathon_pledges = Walkathon::Pledge.includes(:student)\n if params[:student_id]\n @walkathon_pledges = @walkathon_pledges.where(student_id: params[:student_id])\n @student = Student.find(params[:student_id])\n end\n respond_to do |format|\n format.html\n format.csv do\n headers['Content-Disposition'] = \"attachment; filename=\\\"pledges.csv\\\"\"\n headers['Content-Type'] ||= 'text/csv'\n end\n end\n end",
"def download_csv_file_most_seached_merchants\n if (params[:start_date]&¶ms[:end_date]).blank?\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.all.order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at\n writer << csv_value.map(&:inspect).join(', ')\n writer << \"\\n\"\n end\n end\n send_file(file_name)\n else\n start_date = Date.strptime(params[:start_date], \"%m/%d/%Y\")\n end_date = Date.strptime(params[:end_date], \"%m/%d/%Y\")\n header = 'Merchant Id, Merchant Name, Created On, last time Searched'\n file_name = \"Most_searched_merchant#{start_date}_to_#{end_date}.csv\"\n File.open(file_name, \"w\") do |writer|\n writer << header\n writer << \"\\n\"\n Merchant.where(\"DATE(created_at) >= ? AND DATE(created_at) <= ?\", start_date, end_date).order('view_counter desc').each do |merchant|\n csv_value = merchant.id, merchant.try(:name), merchant.created_at, merchant.updated_at\n writer << csv_value\n writer << \"\\n\"\n end\n end\n \n send_file(file_name)\n end\n end",
"def export\n @donor = Donor.order(:created_at)\n respond_to do |format|\n format.html\n format.csv { send_data @donor.as_csv, filename: \"Donors Export #{Date.today}.csv\" }\n end\n end",
"def download_response_files!\n files_downloaded = []\n File.makedirs(cache_location + '/returns')\n with_ftp do |ftp|\n files = ftp.list('*.csv')\n files.each do |filels|\n size, file = filels.split(/ +/)[4], filels.split(/ +/)[8..-1].join(' ')\n ftp.get(file, cache_location + '/returns/' + user_suffix + '_' + file)\n files_downloaded << file\n end\n end\n files_downloaded\n end",
"def show\n respond_to do |format|\n format.html\n format.csv { send_data @cycle.to_csv, filename: \"users-#{Date.today}.csv\" }\n end\n end",
"def fetch_csv\n convert_to_csv(fetch)\n end",
"def show\n respond_to do |format|\n format.html\n format.json\n format.csv {\n send_data(to_csv(@user), filename: @user.name + '_history.csv')\n }\n end\n end",
"def save_students\n File.open(user_filename, \"w\") do |file|\n @students.each do |student|\n student_data = [student[:name], student[:cohort], student[:hobbies]]\n csv_line = student_data.join(\",\")\n file.puts csv_line\n end\n end\nend",
"def download_prices() \n\t\t \t \t \t\n\t\t\t\t@dropbox_token = DropboxSession.deserialize(Linkdropbox.first.dropbox_token)\n\t\t\t\tclient = DropboxClient.new(@dropbox_token)\n\t\t\t\tcontents, metadata = client.get_file_and_metadata('Grupo8/DBPrecios.accdb')\n\t\t\t\tbegin\n\t\t\t\t open('public/jars/DBPrecios.accdb', 'wb') {|f| f.puts contents }\n\t\t\t\t \n\t\t\t\t \n\t\t\t\trescue\n\t\t\t\t flash[:success] = \"Exception occured while downloading...\"\t\t\n\n\t \t\tend \n\n\t \t\tLinkdropbox.import_prices_to_csv\t\n\tend",
"def index\n\t\t@leaders=Leader.all.order(\"first_name ASC, last_name ASC\")\n\n\t respond_to do |format|\n\t format.html\n\t format.csv do\n\t filename = \"mpate-\" + params[:controller] + \"-\" + Time.now.strftime(\"%m-%e-%Y\")\n\t headers['Content-Disposition'] = \"attachment; filename=\\\"#{filename}\\\"\"\n\t headers['Content-Type'] ||= 'text/csv'\n\t end\n\t end\n\tend",
"def index\n @companies = Company.all\n @top_billing = Company.ordered_by_current_bill\n\n @import = Company::Import.new\n # authorize @companies\n skip_authorization\n respond_to do |format|\n format.html\n format.csv { send_data @companies.to_csv, filename: \"companies-export-#{Time.now}-inclustaff.csv\" }\n \tend\n\n end",
"def export_data(tables, min_id=false, max_id=false)\n pause_replication if @master && ! @repl_paused\n import_export_user = 'jetpants'\n create_user(import_export_user)\n grant_privileges(import_export_user) # standard privs\n grant_privileges(import_export_user, '*', 'FILE') # FILE global privs\n reconnect(user: import_export_user)\n @counts ||= {}\n tables.each {|t| @counts[t.name] = export_table_data t, min_id, max_id}\n ensure\n reconnect(user: app_credentials[:user])\n drop_user import_export_user\n end",
"def export_summary(user_data)\n header = []\n row = []\n\n # chose to remove the 'total' key in the hash, figured end user will want to use own calculation on the CSV file when downloaded\n user_data[\"points\"].delete(\"total\")\n\n # put the hash keys into an array to form the header line\n user_data[\"points\"].each_key do |key|\n header << key\n end\n\n # put the values in an array to form the first line\n user_data[\"points\"].each_value do |value|\n row << value\n end\n\n # create the CSV file\n CSV.open(\"summary_#{user_data[\"name\"].downcase.split.join(\"_\")}.csv\", \"wb\") do |csv|\n\n # add the arrays into the file to create the header and first row\n csv << header\n csv << row\n end\n\n # unless something went wrong, show that the data exported correctly\n if File.exist?(\"summary_#{user_data[\"name\"].downcase.split.join(\"_\")}.csv\")\n puts \"\\nFile successfully save!\"\n else\n puts \"\\nThe files did not save, try again.\"\n end\nend",
"def export_histories\n @profiles = User.all if current_user.is? :admin\n\n if @profiles\n path = 'export/'\n name = 'alle_profile_historien.csv'\n File.new(path + name, \"w\").path\n input = \"\"\n\n @profiles.each do |profi|\n \n File.open(path+name, \"w\") do |histories|\n \n input << \"Name, Vorname, Kunden-Nr.,\\n\"\n input << \"#{profi.last_name}, #{profi.first_name}, #{profi.user_number}\\n\"\n\n packages = profi.packages\n orders = profi.orders\n\n unless packages.blank?\n input << \"Pakete-Historie,\" + \"\\n\"\n input << \"Paket-Nr., Erstellt am, Anzahl Kleider, Geschlecht, Beschreibung, Labels,\" + \"\\n\"\n packages.each do |package|\n input << \"#{package.serial_number},#{formatted_date(package.created_at)},#{package.amount_clothes},#{package.sex == true ? \"Mädchen\" : \"Junge\"},\"\n input << \"#{package.notice.gsub(\",\", \" \")},\" unless package.notice.nil?\n input << \"#{package.label.gsub(\",\", \" \").gsub(\"--\", \" \")},\" unless package.label.nil?\n input << \"\\n\"\n end\n end\n \n unless orders.blank?\n input << \"\\n\"+ \"Bestell-Historie,\" + \"\\n\"\n input << \"Bestell-Nr., Bestellt am, Bewerted am , Bewertung, Angekommen?,\" + \"\\n\"\n orders.each do |order|\n input << \"#{order.order_number},#{formatted_date(order.created_at)}, #{formatted_date(order.eva_date_created_at)},\"\n input << \"#{I18n.t(order.evaluation.to_sym)},\" if order.evaluation\n input << \"#{order.received == true ? \"Nein\" : \"Ja\"}\" + \"\\n\"\n end\n\n input << \"\\n\"\n end\n puts input\n histories.write(input) unless input.blank?\n end\n end\n send_file(path+name) \n end\n\n end",
"def welldcsv\n @payment_run = PaymentRun.find(params[:id])\n \n paydates = ''\n paydates = @payment_run.payment_date.to_s(:local) unless @payment_run.payment_date.blank?\n \n send_data(@payment_run.welld_payment_file,:type => 'text/plain', :filename => \"payments-RUN#{@payment_run.id}-#{paydates}.csv\" )\n end",
"def download_handoff_report\n \t\tsearch_parm = search_csv(params)\n \t\tbu = search_parm[0]\n \t\tl1 = search_parm[1]\n \t\tl2 = search_parm[2]\n \t\tl3 = search_parm[3]\n\t\t\n\t\tif params[:report_include_canceled] == \"report_include_canceled\"\n \t\tinclude_cancel = true\n \telse\n \t\tinclude_cancel = false\n \tend\n \tif params[:report_include_onhold] == \"report_include_onhold\"\n \t\tinclude_onhold = true\n \telse\n \t\tinclude_onhold = false\n \tend\n\t\tif params[:report_include_completed] == \"report_include_completed\"\n \t\tinclude_completed = true\n \telse\n \t\tinclude_completed = false\n \tend\t\n\n\t\tputs \"----:#{bu}---: #{l1}---:#{l2}---:#{l3}----can: #{include_cancel}----on: :#{include_onhold}----comp: #{include_completed}\"\n \t\treport_result = WorkFlow.handoff_report_stored_procedure_new(bu, l1, l2, l3, include_completed, include_cancel, include_onhold)\n \t\tcsv_file = WorkFlow.to_csv_new(report_result)\n\n \t\tsend_data csv_file, :filename => 'HAND-OFF-Report-New.csv'\n\n \tend",
"def download_coeffs \n file = Dir.glob(\"#{Rails.root}/public/coeffs/*.csv\")[0].to_s\n logger.debug file\n send_file(file)\n end",
"def export\n headers = JSON[params[:column_array]]\n rows = JSON[params[:row_array]]\n column_names = Array.new\n headers.each do |col|\n column_names << col\n end\n csv_string = CSV.generate do |csv|\n csv << column_names\n rows.each do |row|\n csv << row\n end\n end\n\n filename = params[:file_name] + \".csv\"\n send_data(csv_string,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => filename)\n end",
"def generate_csv\n\n fields = @resource[:class].typus_fields_for(:csv).collect { |i| i.first }\n\n require 'csv'\n if CSV.const_defined?(:Reader)\n # Old CSV version so we enable faster CSV.\n begin\n require 'fastercsv'\n rescue Exception => error\n raise error.message\n end\n csv = FasterCSV\n else\n csv = CSV\n end\n\n filename = \"#{Rails.root}/tmp/export-#{@resource[:self]}-#{Time.now.utc.to_s(:number)}.csv\"\n\n options = { :conditions => @conditions, :batch_size => 1000 }\n\n csv.open(filename, 'w', :col_sep => ';') do |csv|\n csv << fields\n @resource[:class].find_in_batches(options) do |records|\n records.each do |record|\n csv << fields.map { |f| record.send(f) }\n end\n end\n end\n\n send_file filename\n\n end",
"def download_table\n params = download_table_params\n table_name = params[:table_name].parameterize.underscore\n action_name = table_name + \"_table\"\n table_url = self.send(\"#{table_name}_table_admin_reports_path\") + \".csv\"\n filters = params.except(:table_name).to_h\n \n redirect_to({\n controller: 'reports', \n action: action_name, \n format: :csv\n }.merge(filters))\n end",
"def getCSV(restid)\n # get all ordered items of users who ordered in restaurant \"restid\"\n userItems=OrderMng.getAllUsersItemsByRestID(restid)\n csv=[]\n for userItem in userItems\n tmp=Array.new\n for item in userItem\n tmp.push(item)\n end\n csv.push(tmp)\n end\n puts(\"B4 P\")\n p csv\n p userItems\n return userItems\nend",
"def csvexport\n @offer = Offer.find(params[:offer_id])\n projectid = @offer.subsubproject.subproject.project.id\n subprojectid = @offer.subsubproject.subproject.id\n subsubprojectid = @offer.subsubproject.id\n\n\n\n CSV.open(\"offerte#{Time.now.strftime(\"%Y-%m-%d-%H-%M\")}.csv\", \"wb\", {:headers => true, :encoding => \"iso-8859-1\", :col_sep => \";\"}) do |csv|\n csv << ['Offertposition', 'Geraeteanzahl',\n 'Eng Elektroplanung', 'Eng Planung/SW', 'Eng IBN/Bauleitung',\n 'SPS Total Brutto', 'SPS Total Netto',\n 'Schaltanlagen Total Brutto', 'Schaltanlagen Total Netto',\n 'Elektroinstallation Total Brutto', 'Elektroinstallation Total Netto',\n 'Total Brutto', 'Total Netto']\n csv << ['Total', @offer.total_geraeteanzahl, @offer.total_eng_elplanung, @offer.total_eng_planung_sw,\n @offer.total_eng_ibn_bauleitung, @offer.total_sps_total_brutto, @offer.total_sps_total_netto,\n @offer.total_sch_total_brutto, @offer.total_sch_total_netto,\n @offer.total_elinst_total_brutto, @offer.total_elinst_total_netto,\n @offer.total_total_brutto, @offer.total_total_netto]\n\n @offer.offer_offertpositions.each do |offer_offertposition|\n end\n end\n\n redirect_to project_subproject_subsubproject_offer_path(projectid, subprojectid, subsubprojectid, @offer.id),\n :notice => \"Offerte wurde unter \" + Rails.root.to_s + \"/ abgelegt!\"\n end",
"def export(params={})\n columns = delimited_string_to_array(Settings.export.member_fields)\n columns = ['name'] if columns.empty? # to prevent any bad behavior with empty criteria\n send_data Member.export(columns), :filename => \"members.csv\"\n end",
"def index\n @mailing_list = MailingList.find(params[:mailing_list_id]) \n \n @customers = Customer.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @customers }\n format.csv { \n csv_file = Customer.to_csv(current_user.id)\n send_data csv_file, :type => 'text/csv', :disposition => 'attachment;\nfilename=output.csv'\n\n \n\n }\n end\n end",
"def assignments_file_for_sandbox_status(sandbox, project)\n if sandbox\n ensure_sandbox_assignment_csv(project)\n else\n project.local.file('data', 'assignment.csv').as(:csv)\n end\n end",
"def index\n @backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @backend_tutorial_stats }\n format.csv do\n backend_tutorial_stats = Backend::TutorialStat.order('created_at ASC')\n data = Backend::TutorialStat.download_tutorial_stats_csv(backend_tutorial_stats) \n filename = \"tutorial_stats#{Time.now.strftime(\"%Y%m%d%H%M%S\")}.csv\" \n send_data(data, :type => \"text/csv; charset=utf-8; header=present\", :filename => filename) \n end\n end \n end",
"def index\n# @interns = Intern.where(\"is_archived=false\").order(\"created_at desc\")\n @interns = Intern.where(\"is_archived is null or is_archived=false\").order(\"created_at desc\")\n\n @isadmin = is_admin_user?\n unless @isadmin\n redirect_to \"/\" and return\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.csv #{ send_data @interns.as_csv }\n format.json { render json: @interns }\n end\n end",
"def index\n\t\t@users= User.all\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\tformat.csv { send_data Importer.generate(:csv, @users), filename: \"users-#{Date.today}.csv\" }\n\t\tend\n\n\tend",
"def index\n @search = Claim.order('claims.created_at desc').search(params[:q])\n @claims = @search.result(:distinct => true).paginate(:per_page => 50, :page => params[:page])\n respond_to do |format|\n format.html{}\n format.csv {\n send_data generate_csv, :type => 'text/csv; charset=iso-8859-1; header=present', :disposition => \"attachment; filename=claims_list.csv\" \n }\n end\n end",
"def export_to_csv(users)\n CSV.open(\"./user.csv\", \"wb\") do |csv|\n csv << Universe.attribute_names\n users.each do |user|\n csv << user.attributes.values\n end\n end\n end",
"def csv_lead_owner_export\n CSV.generate do |csv|\n header = []\n header << 'Lead_owner'\n header << 'Applicant type'\n header << 'University Name'\n header << 'Braven Region'\n\n csv << header\n\n LeadOwnerMapping.all.each do |m|\n exportable = []\n exportable << m.lead_owner\n exportable << m.applicant_type\n exportable << m.university_name\n exportable << m.bz_region\n\n csv << exportable\n end\n end\n end",
"def export_ansokan_csv\n download_csv(AdminOnly::Reports::ShfApplicationsCsvReport.new, t('.success'), t('.error'))\n end",
"def download\n @posts = PostService.getAllPosts\n respond_to do |format|\n format.html\n format.csv { send_data @posts.to_csv, :filename => \"Post List.csv\" }\n end\n end",
"def download\n file = BruseFile.find_by(:download_hash => params[:download_hash])\n if file.identity.user == current_user\n # send the file to the user\n send_data file.identity.get_file(file.foreign_ref), filename: file.name, type: file.filetype\n end\n end",
"def csv_data\n sql = Utils.setup_basic_filters(SqlAssembler.new, @params.merge('site_id' => @site[:id]))\n sql.set_select 'SELECT package_name, success, title, digitool_id, islandora_pid, content_model, time_started, time_finished, bytes_ingested FROM islandora_packages'\n sql.set_order 'ORDER BY id DESC'\n return sql.execute\n end",
"def report_csv_download(report)\n\t\t\tpost= { \"token\" => @token, \"report\" => report, \"xslt\" => 'csv.xsl' }\n\n\t\t\t# Get CSV report location and build post params\n\t\t\tfilename=nessus_http_request('file/xslt', post).scan(/fileName=(.*csv)/).flatten.first\n\t\t\tpost= {\"token\" => @token, 'fileName' => filename, 'step' => 2}\n\n\t\t\t# Allow for generation time\n\t\t\tRex::ThreadSafe.sleep(0.5)\n\n\t\t\t# Get CSV report\n\t\t\tfile=nessus_http_request('file/xslt/download',post)\n\n\t\t\treturn file\n\t\tend",
"def csv(section = 'main', q='google',date='ytd',geo='')\n trend_params = {\"graph\"=>\"all_csv\", \"sa\" => \"N\"}\n trend_params[\"q\"] = q\n trend_params[\"date\"] = date\n if !geo || geo != ''\n trend_params[\"geo\"] = geo\n end\n\n data = @client.get_content(URI.parse(@url_Export), trend_params)\n # empty to return all data\n if section == ''\n return CSV.parse(data)\n end\n # split data into sections\n segments = data.split(\"\\n\\n\\n\")\n if section == 'main'\n section = ['Week', 'Year', 'Day','Month']\n else\n section = [section]\n end\n\n for x in segments do\n if section.include? x.split(',')[0].strip\n maindata = CSV.parse(x)\n return maindata\n end\n end\n end",
"def gather_data # The commands that retrieve required data from the User\n puts 'This script takes in a 1 column csv (set your column header to course_id)'\n puts 'Enter the Bridge instance URL (e.g. https://ibennion.bridgeapp.com)'\n @url = gets.chomp! # Prompts user for desired Bridge domain\n puts 'Enter your Bridge API Key'\n @token = gets.chomp!\n puts 'These calls require you masquerade as an admin. What is the admin user ID?'\n @admin_id = gets.chomp! # The 'publish' endpoint requires you masquerade as an admin. Set the admin's User ID here.\n puts 'Enter the path to your CSV mapping file (e.g. /Users/ibennion/Documents/mapping.csv)'\n @csv_path = gets.chomp! # Set your path to the csv file. e.g. '/Users/ccromar/Downloads/sample.csv'\nend",
"def index\n @beck =GrassAllocation.page params[:page]\n @grass_allocations = GrassAllocation.all\n #The following code was based on a video found on www.driftingruby.com/episodes/importing-and-exporting-csv-data\n respond_to do |format|\n format.html \n format.csv {send_data @grass_allocations.to_csv} \n end \n end",
"def store_creddentials\n CSV.open(ENV['HOME'] + '/creddentials.csv', 'w') do |csv|\n csv << [@email, @password]\n end\n read_creddentials\n end"
] | [
"0.72140783",
"0.720665",
"0.67683274",
"0.6712821",
"0.6648203",
"0.6629684",
"0.65356046",
"0.6455421",
"0.6428573",
"0.6386088",
"0.63263327",
"0.6281056",
"0.6259903",
"0.62355286",
"0.62216735",
"0.6213511",
"0.620566",
"0.61847556",
"0.6174584",
"0.6166345",
"0.61079746",
"0.6098308",
"0.6075496",
"0.6051209",
"0.6039447",
"0.60301566",
"0.60278356",
"0.60272324",
"0.60178554",
"0.5992366",
"0.5959538",
"0.5933445",
"0.59002167",
"0.5881761",
"0.5876513",
"0.58758944",
"0.5864858",
"0.58563024",
"0.5844994",
"0.5820554",
"0.5802688",
"0.57879233",
"0.5770934",
"0.57247514",
"0.57202584",
"0.57082564",
"0.5706294",
"0.5702017",
"0.56923515",
"0.5674776",
"0.56697303",
"0.56582797",
"0.5654282",
"0.5648557",
"0.56457144",
"0.56299937",
"0.56260973",
"0.5622522",
"0.55999225",
"0.5599576",
"0.55898696",
"0.55619395",
"0.5546291",
"0.5544725",
"0.5544527",
"0.5536809",
"0.55361605",
"0.5535556",
"0.55312806",
"0.5529334",
"0.5527937",
"0.55275655",
"0.5524268",
"0.5522915",
"0.5521554",
"0.5517603",
"0.5514322",
"0.54989266",
"0.549808",
"0.5486757",
"0.54796475",
"0.5475029",
"0.54714537",
"0.54639155",
"0.5460443",
"0.54582214",
"0.5453555",
"0.54524446",
"0.54518527",
"0.54499966",
"0.5445176",
"0.54450876",
"0.54241276",
"0.54227334",
"0.5420799",
"0.5416166",
"0.5407131",
"0.5401323",
"0.53998524",
"0.53972006"
] | 0.68278235 | 2 |
passes all Users research_individual page | def research_individual
@users = Array.new()
User.all.each do |u|
if !u.admin?
@users.push(u)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def research_index\n @houdd_user = HouddUser.find(params[:user_id])\n @researches = HouddUser.find(params[:user_id]).researches\n\n respond_to do |format|\n format.html # research_index.html.erb\n end\n end",
"def search_results\n @individual = Individual.by_uid( params[:'names-search-uid'] )\n if @individual\n redirect_to display_path( @individual.uid )\n else\n redirect_to :search\n end\n end",
"def search\n @lawyer = User.first\n expertise = Expertise.find_by_id(params[:id])\n @users = UserExpertise.where(expertise_id: expertise)\n end",
"def index\n @admin_acr = params[:admin_acr]\n\n @all_users = params[:all_users]\n @all_institutions = params[:all_institutions]\n\n @q = params[:q]\n\n @users = User.order(:first_name, :last_name)\n @institutions = Institution.order(full_name: :asc)\n\n if (!@q.blank? && !@q.nil?)\n @users = @users.search_terms(@q)\n end\n\n case @all_users\n when \"all\"\n @users = @users.page(params[:page]).per(9999)\n else\n @users = @users.page(params[:page]).per(10)\n end\n\n case @all_institutions\n when \"all\"\n @institutions = @institutions.page(params[:page]).per(9999)\n else\n @institutions = @institutions.page(params[:page]).per(10)\n end\n\n end",
"def search_users\n unless @current_admin.is_super_admin\n unless @current_admin.privilages.include? '1'\n flash[:authority_error]=\"You are not authorized to navigate to this page \"\n redirect_to admin_index_path\n empty_user_id\n return\n end\n end\n empty_user_id\n @check=0\n @searched_user=User.new\n end",
"def find_people\n @users=[]\n session[:search_opt] = params[:user]\n if request.post?\n @users = @login_user.search_query(params[:user],1)\n flash[:notice] = \"No results found.\" if @users.empty?\n end\n end",
"def index\n registered_user_information = UserInformation.find_by(user_id: \"#{@user.id}\")\n if !@user.is_elevated?\n if registered_user_information.nil?\n redirect_to new_user_information_url\n else\n redirect_to user_information_url registered_user_information.id\n end\n else\n @user_informations = UserInformation.all.order(updated_at: :desc).page(params[:page])\n authorize @user_informations\n respond_with(@user_informations)\n end\n end",
"def index\n if user_signed_in?\n if current_user.role != 2\n redirect_to root_path\n else\n @institute_user = InstituteUser.find_by user_id: current_user.id\n @institute = Institute.find(@institute_user.institute_id)\n #@students = Student.all\n\n @query = Student.where(\"institute_id = ?\", @institute.id).ransack(params[:q])\n @institute_students = @query.result \n end\n else\n redirect_to root_path\n end\n end",
"def search\n @users ||= User.search_user(params[:search])\n authorize! :read, @user\n end",
"def index\n authorize_action_for User, at: current_store\n query = saved_search_query('user', 'admin_user_search')\n @search = UserSearch.new(query.merge(search_constrains))\n @users = @search.results.page(params[:page])\n end",
"def index_user\n p \"*\" * 50\n p \"index_user\"\n if current_user.status == \"teacher\" || current_user.admin?# && !current_user?(user)\n user_id = current_user.id\n @user = User.find(user_id)\n @exercises = Exercise.where(user_id: @user.id)\n elsif current_user.status == \"student\"\n redirect_to(current_user)\n end\n \n end",
"def index\n @information_personals = InformationPersonal.find_by(user_id: current_user.id)\n \n if !@information_personals\n redirect_to new_information_personal_path\n end\n end",
"def user_search_results\n @limit = 25\n load_facility\n @price_group = PriceGroup.find(params[:price_group_id]) if params[:price_group_id].present?\n @account = Account.find(params[:account_id]) if params[:account_id].present?\n @product = Product.find(params[:product_id]) if params[:product_id].present?\n @search_type = valid_search_types.find { |t| t == params[:search_type] }\n @users, @count = UserFinder.search_with_count(params[:search_term], @limit)\n\n render layout: false\n end",
"def index\n if isAdmin?\n @title= \"Listing All Professors\"\n @users = User.where('role' => 2).paginate(page: params[:page])\n if(!@users.any?)\n flash[:notice]= \"No professor on CMS. To create a new professor click on Create New Professor\"\n end\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n else\n flash[:notice]= \"You don't have permission to browse all users\"\n redirect_to courses_path\n end\n end",
"def suggest\n main_page_cred\n @user = current_user\n end",
"def index\n params[:q] = {} if params[:q].blank?\n params[:q][:company_id_eq] = current_user.company_id\n @q = User.search(params[:q])\n @users = @q.result(:distinct => true).paginate(:page => params[:page])\n end",
"def students\n @users = User.paginate(:page => params[:page], :per_page => 20).student\n @filter = \"All Students\"\n render :index\n end",
"def index\n @join_user_to_assistants = JoinUserToAssistant.all\n end",
"def index\n @users = User.all\n\n # If a level is passed, check if it's a number and a valid level of privileges.\n check_level_param\n @users = @users.where(:level => @level) if (!@level.blank?)\n\n # If a search query is received, filter the results\n if (!params[:q].blank?)\n # Do the search\n @query = params[:q]\n @users = @users.where(\"$or\" => [{:name => /#{@query}/i}, {:email => /#{@query}/i}])\n end\n\n # If a page number is received, save it (if not, the page is the first)\n if (!params[:page].blank?)\n page = params[:page].to_i\n page = 1 if (page < 1)\n else\n page = 1\n end\n \n # Paginate!\n @users = @users.page(page)\n\n respond_to do |format|\n format.html\n end\n end",
"def index\n @institute_admins = case current_user.rc\n when 'EA'\n InstituteAdmin.includes(:profile).page(params[:page])\n when 'IA'\n InstituteAdmin.includes(:profile).where(:id=>current_user.institute_admin).page(params[:page])\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @institute_admins }\n end\n end",
"def index\n @regdetails = Regdetail.select(:user_id).distinct\n @user_ids = @regdetails.collect { |r| r.user_id }\n @registered_users = User.find(@user_ids).paginate(:page => params[:page], :per_page => 5)\n # @registered_users = @registered_users.paginate(:page => params[:page], :per_page => 4)\n\n end",
"def results\n if current_user.admin?\n @students = Student.all.search(params[:search])\n else\n @students = current_user.students.search(params[:search]) \n end\n \n respond_to do |format|\n format.html\n format.js\n end\n end",
"def search_user\n \t@users = UsersService.searchUser(params)\n render :index\n end",
"def index\n @edu_docs = current_user.edu_docs\n end",
"def registrations\n @search = User.metasearch(params[:search])\n @users = @search.where(:state => 'new').paginate(:page => @page, :per_page => @per_page)\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @users }\n end\n end",
"def index\n if current_user.admin == \"yes\"\n\t\n \t# if params[:user][:id]\n \t# @incidents = Incident.where(\"user_id = ?\", params[:user][:id])\n \t# @users = User.all\n \t# else\t \n # Bens code\n # @incidents = Incident.all(:order => 'id DESC')\n \n \n # If an admin logs in\n # Fiters through each department\n if params[:search]\n @incidents = Incident.paginate(:page => params[:page], :per_page => 10, :order => 'id DESC').find(:all, :conditions => ['department LIKE ?', \"%#{params[:search]}%\"], :joins => [:user => :profile])\n # Search through specific column\n elsif params[:searchInvoice]\n @incidents = Incident.paginate(:page => params[:page], :per_page => 10, :order => 'id DESC').find(:all, :conditions => ['report_nr LIKE ?', \"%#{params[:searchInvoice]}%\"])\n # Display all with pagination\n else\n @incidents = Incident.paginate(:page => params[:page], :per_page => 10, :order => 'id DESC')\n end\n\t\n \n \n # If not an admin\n \telse\n @incidents = current_user.incidents.paginate(:page => params[:page], :per_page => 10, :order => 'id DESC')\n @users = User.all\n \n end\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @incidents }\n end\n end",
"def index\n if current_user.local_access?\n @uos = {current_user.uo.nome => current_user.uo.id}\n\n if !params[:q].blank? && !params[:q][:uo_id_eq].blank?\n if params[:q][:uo_id_eq].to_i != current_user.uo_id\n params[:q][:uo_id_eq] = -1\n end\n end\n\n @q = User.where(\"uo_id = ?\", current_user.uo_id).order(\"nome ASC\").search(params[:q])\n else\n @uos = Uo.order(\"nome ASC\").all.collect {|o| [o.nome, o.id]}\n\n @q = User.order(\"nome ASC\").search(params[:q])\n end\n\n @users = @q.result.page(params[:page])\n @total_registros = @q.result.count\n @roles = Role.order(\"nome ASC\").collect{ |r| [r.nome, r.id]}\n end",
"def index\n @title = 'Enroller Dashboard'\n @possible_jurisdiction_paths = current_user.jurisdiction.subtree.pluck(:id, :path).to_h\n @all_assigned_users = current_user.patients.where.not(assigned_user: nil).pluck(:assigned_user).uniq.sort\n redirect_to(root_url) && return unless current_user.can_create_patient?\n end",
"def index\n [:id, :lecturer_id, :student_id, :email, :jwt].each do |param|\n unless search_params[param].blank?\n if param == :jwt\n @user = User.find_by_jwt(search_params[:jwt])\n else\n @user = User.find_by(param => search_params[param])\n end\n\n break\n end\n end\n \n if @user.blank?\n render :status => :not_found\n else\n render :status => :ok\n end\n end",
"def search_user\n search = Sunspot.search User do\n fulltext search_params\n end\n r = Hash.new\n search.results.each do |u|\n r.store(u.id,u)\n end\n render :json => r.to_json\n end",
"def index\n @gradeworks = Gradework.all\n @juries = User.users_jury\n @directors = User.users_director\n @students = User.users_student\n end",
"def profile_specialist_search\n\t\t# search the the speciality name according to the terms\n\t\tspecialities = Speciality.any_of({ :name => /^#{params[:term]}/i }).all.collect{|speciality| {label: speciality.name ,value: speciality.id.to_s}}.to_json \n\t\t# render to the surgery name page\n\t\trespond_to do |format|\n\t\t format.json { render :json => specialities }\n\t\tend\n\tend",
"def search_user\n @users = UserService.searchUser(params)\n render :index\n end",
"def index\n authorize HigherEducationInstitution\n @higher_education_institutions = HigherEducationInstitution.order(:name).search(params[:search]).page(params[:page])\n end",
"def scrape_all\n\t\tpage = fetch_page(@base_url)\n\t\turl_list = get_url_list(page)\n\t\turl_list.each do |url|\n\t\t\t#scrape the page\n\t\t\tuser = User.new(url,@user_agent)\n\t\t\tuser.scrape\n\t\tend\n\tend",
"def index\n # @priest_users = PriestUser.all\n area = Area.doareaid(params['area'].to_s)\n religion = params['religion'].to_s\n area.each do |ar|\n @areaid = ar.id\n end\n @results = PriestUser.where(area_id: @areaid,religion: religion).soting\n end",
"def index\n @corporate_users = User.get_all_users(current_user).where.not(id: current_user.id)\n @corporate_users = @corporate_users.paginate(:page => params[:page],:per_page => 10).order('id DESC')\n end",
"def index\n user = current_user\n if not current_user.admin\n lab_ids = []\n user.labs.each do |lab|\n lab_ids.push(lab.id)\n end\n if params[:result] == \"true\"\n patients = Patient.includes(:users_patients)\n .where('result = true and (users_patients.user_id = ? or lab_id IN (?))', user.id, lab_ids)\n .references(:users_patients)\n @patients = patients.order(:case_id).page(params[:page]).per(25)\n else\n patients = Patient.includes(:users_patients)\n .where('users_patients.user_id = ? or lab_id IN (?)', user.id, lab_ids)\n .references(:users_patients)\n @patients = patients.order(:case_id).page(params[:page]).per(25)\n end\n else\n if params[:result] == \"true\"\n @patients = Patient.where(result: true).order(:case_id).page(params[:page]).per(25)\n else\n @patients = Patient.order(:case_id).page(params[:page]).per(25)\n end\n end\n end",
"def index\n @has_help = true\n @page_title = \"Lista Ricerche\"\n @searches = Search.where(:user_id => current_user.id )\n if !current_user.ispro\n flash.now[:warning] = \"Attiva il piano MyAgencyCall! Pro! per creare altre ricerche!\"\n end\n end",
"def index\n @users = if params[:q].blank?\n User.order(:email).page params[:page]\n else\n elastic_search_results(User)\n end\n end",
"def search_collaborator\n # Search for email or name of collaborator\n projects = current_user.projects\n @users = Array.new\n projects.each do |project|\n project.users.each do |user|\n unless @users.include? user\n @users.push user\n end\n end\n end\n\n # Get all users if current user is a manager or admin\n if manager? || admin?\n @users = current_user.university.users\n end\n\n # Return a JSON with \"label\" and \"value\" as key required by JQueryUI autocomplete\n result = Array.new\n @users.each do |user| \n label = user.first_name + \" \" + user.last_name + \" - \" + user.email\n item = Hash[ \"label\" => label, \"value\" => user.email ]\n result.push item\n end\n\n render :json => result\n end",
"def search\n @users = User.all.order(:user_id)\n if params[:search][:annual].present?\n @users = @users.where(\"annual like '%\" + params[:search][:annual] + \"%' \").order(:user_id) \n end\n if params[:search][:name].present?\n @users = @users.where(\"name like '%\" + params[:search][:name] + \"%' \").order(:user_id)\n end\n @name = params[:search][:name]\n @annual = params[:search][:annual]\n render :index\n end",
"def index\r\n @analyses = Analysis.all\r\n @user = User.find_by_email($user_email) \r\n end",
"def index\n @searches = current_user.searches.all\n end",
"def search(user, query, collection, wiki)\n end",
"def scrape\n User.all.each do |user|\n user.scrape_articles\n end\n render nothing: true\n end",
"def index\n authorize User\n\n @users = !!params[:search] ? User.search_includes(params[:search]) : User.all_includes\n\n @users = User.filter_with(@users, params[:filter]) if !!params[:filter]\n\n @users = User.sort_num_desc(@users, params[:sort_by]) if !!params[:sort_by]\n end",
"def index\n @crawls = Crawl.all\n @ability = Ability.new(current_user)\n # Check the search fields for text, then generate results list\n # if params[:text_search].present? && params[:number_search].present?\n\n if params[:number_search].present? && params[:text_search].present?\n @results = Array.new\n flash[:notice] = \"Search using only one field.\"\n elsif params[:number_search].present?\n @results = Crawl.where(\"user_id = ?\", params[:number_search])\n elsif params[:text_search].present?\n @results = Crawl.search(params[:text_search])\n end\n end",
"def index\n \t\n @users = User.search(params[:search], params[:page])\n @total_count = User.search(params[:search],\"\").count\n \n @menu = \"home\"\n @board = \"user\"\n @section = \"index\"\n \n render 'user'\n end",
"def index\n \t@prevSort = params[:sort].nil? ? \"lname\" : params[:sort].gsub(\"%20\",\" \") \n \t@dept = params[:dept] || \"%\"\n\n if params[:center].nil?\n @researchers = Researcher.search(params[:search]).where(:dept.matches => @dept).order(@prevSort).page(params[:page]).per(5)\n else\n @researchers = Center.find_by_abbreviation(params[:center]).researchers.order(@prevSort).page(params[:page]).per(5)\n end\n \n @new_faculty = Researcher.recent\n\n\n \trespond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @researchers }\n end\n end",
"def index\n\t\t@users = User.paginate(:page => params[:page], :per_page => 5)\n\t\tauthorize @users\n\tend",
"def index\n #redirect_to(:action => 'signup') unless logged_in? || Person.count > 0\n @people = Person.find(:all)\n @users = Ldapuser.find(:all)\n @groups = LdapGroup.find(:all)\n #@group = LdapGroup.find(\"1046\")\n end",
"def my_results(user)\n self.design_review_results.to_ary.find_all { |drr| drr.reviewer == user }\n end",
"def index\n @user_responsibilities = UserResponsibility.all\n end",
"def index\n authenticate_user\n @users = User.all\n @users = User.find_all_by_last_name(params[:search])\n end",
"def index\n @users = User.search_user(params[:search_user])\n end",
"def index\n if current_user.has_role?(:superadmin)\n @users = User.with_role(:member).\n order(:email).\n search(filter).\n paginate(page: params[:page], per_page: 12)\n else\n redirect_to authenticated_root_url, alert: 'You are not authorized to access this page.'\n end\n end",
"def index\n @researchers = Researcher.all\n end",
"def user_search\n @users = User.admin_search(params[:query])\n end",
"def index\n @consultations = current_user.consultations.all\n\n if params[:title].present?\n @consultations = @consultations.where(\"lower(title) ilike '%#{params[:title].downcase}%'\")\n end\n end",
"def index\n# authorize! :index, Observation\n if current_user.has_role? :teacher\n @observations = Observation.for_teacher(current_user).complete.most_recent.paginate(:page => params[:page], :per_page => 10)\n elsif current_user.has_role? :principal\n if !current_user.p_school.nil?\n if params[:my_observations]\n @observations = current_user.p_observations.most_recent.filter(params.slice(:active, :for_content_area, :for_grade, :for_school_year, :teacher_search)).paginate(:page => params[:page], :per_page => 10)\n else\n @observations = Observation.for_school(current_user.p_school).most_recent.filter(params.slice(:active, :for_content_area, :for_grade, :for_school_year, :teacher_search)).paginate(:page => params[:page], :per_page => 10)\n end\n else\n flash[:error] = \"You are not currently assigned to a school\"\n redirect_to root_url\n end\n elsif current_user.has_role? :specialist\n if params[:my_observations]\n @observations = Observation.for_specialist(current_user).most_recent.filter(params.slice(:active, :for_school, :for_content_area, :for_grade, :for_school_year, :teacher_search)).paginate(:page => params[:page], :per_page => 10)\n else\n @observations = Observation.most_recent.filter(params.slice(:active, :for_school, :for_content_area, :for_grade, :for_school_year, :teacher_search)).paginate(:page => params[:page], :per_page => 10)\n end\n else\n @observations = Observation.most_recent.filter(params.slice(:active, :for_school, :for_content_area, :for_grade, :for_school_year, :teacher_search)).paginate(:page => params[:page], :per_page => 10)\n end\n \n # School Year Dropdown list\n current_year = Date.current.year\n if Date.current < Date.new(current_year,7,1)\n latest_year = current_year-1\n else\n latest_year = current_year\n end\n \n @year_list = []\n while latest_year >= 2009\n @year_list.push(\"#{latest_year}-#{latest_year+1}\")\n latest_year-=1\n end\n \n @schools = School.active\n end",
"def index\n @myarticles = Myarticle.search_article(current_user.id)\n end",
"def index\n if params[:search].blank?\n if params[:tag].nil?\n # filter\n #@group = Group.find(params[:group_id]) unless params[:group_id].nil?\n #@project = Project.find(params[:project_id]) unless params[:project_id].nil?\n #@office = Group.find(params[:office_id]) unless params[:office_id].nil?\n #@role = Role.find(params[:role_id]) unless params[:role_id].nil?\n\n # filter\n @groups = Group.all\n @projects = Project.of_group(@group)\n @offices = Office.in_group(@group).on_project(@project)\n @roles = Role.all\n\n # contents\n @people = @users = User.of_group(@group).in_office(@office).on_project(@project).of_role(@role)\n else\n @users = User.tagged_with params[:tag]\n end\n else\n @users = User.search params[:search]\n end\n @people = @users\n end",
"def index_lawfirm_users\n if current_user.id == current_user.lawfirm.user_id\n @users = User.where(lawfirm_id: current_user.lawfirm.id).load\n else\n flash[:danger] = \"You must have administrative privileges to view this page.\"\n redirect_to user_cases_path\n end\n end",
"def index\n\t@title = \"My Contacts\"\n\t@users = []\n\tcontacteds = Relationship.where(\"accessed_id = ? AND accepted = ?\", current_user.id, true).paginate(:page => params[:page])\n\tcontactors = Relationship.where(\"accessor_id = ? AND accepted = ?\", current_user.id, true).paginate(:page => params[:page])\n\tcontacteds.each do |contacted|\n\t\t@users.push(User.find_by_id(contacted.accessor_id))\n\tend\n\tcontactors.each do |contactor|\n\t\t@users.push(User.find_by_id(contactor.accessed_id))\n\tend\n\t\n\t@results = @users.paginate(:page => params[:page])\n end",
"def search_results\n @query = params[:user]\n processed_query = sanitize(@query)\n @source = params[:source]\n @results = ApiHelper.user_search(processed_query, USER_COUNT, @source)\n if @results.count == 0\n flash[:notice] = \"No users matching '#{@query}'.\"\n end\n render 'search'\n end",
"def index\n \n if current_user.admin?\n @students = Student.all.search(params[:search])\n else\n @students = current_user.students.search(params[:search]) \n end\n \n respond_to do |format|\n format.html\n format.js\n end\n \n # flash[:danger] = \"No record found\" if @students.nil? || @students.blank?\n\n end",
"def index\n @user_situations = UserSituation.all\n end",
"def index\n if current_user.role == \"su\"\n @organizations = Organization.all\n elsif current_user.role == \"ru\"\n @organizations = Organization.find_by_sql(\"select * from organizations where id in (select organization_id from org_users where user_id = #{current_user.id}) \")\n elsif current_user.role == \"sub\"\n @projects = Project.find_by_sql(\"select * for projects where id in (select project_id from user_project where user_id = '#{current_user.id}')\")\n end \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @users }\n end\n end",
"def index\n if current_user.admin?\n redirect_to rails_admin_url\n elsif current_user.reviewer?\n @q = SubjectOfSpeech.search(params[:q])\n @subject_of_speeches = @q.result(distinct: true)\n else\n # 自分で投稿した内容のみ表示する。\n # @subject_of_speeches = SubjectOfSpeech.all\n # @subject_of_speeches = current_user.subject_of_speeches.all\n @q = current_user.subject_of_speeches.search(params[:q])\n # @q = SubjectOfSpeech.search(params[:q])\n @subject_of_speeches = @q.result(distinct: true)\n # binding.pry\n end\n end",
"def index\n @expert_personal = ExpertPersonal.find_by_user_id(self.current_user.id)\n\tif (@expert_personal != nil)\n\t redirect_to('/expert_personals/edit/'+@expert_personal.id.to_s)\n\telsif\n\t redirect_to('/expert_personals/new')\n\tend \n \n end",
"def create\n\n \n @research = Research.new(research_params)\n @user = User.find(session[:user_id]) \n \n respond_to do |format|\n if @research.save\n @user.researches << @research\n format.html { redirect_to @research, notice: 'La actividad se registro con exito..' }\n format.json { render :show, status: :created, location: @research }\n else\n format.html { render :new }\n format.json { render json: @research.errors, status: :unprocessable_entity }\n end\n end\n\n end",
"def index\n @user = User.find(params[:user_id])\n @conditions = @user.conditions\n\n if current_user.id == @user.id\n\t\t\trender action: :index\n\t\telse\n\t\t\trender file: 'public/denied'\n\t\tend\n end",
"def index\n @hackathons = Hackathon.search(params[:search]) \n if defined?(current_user.id) && (current_user.id != '') then \n @hackathons = Hackathon.where(:user_id => current_user.id)\n else\n redirect_to root_url \n #render 'hackathons/index.html.erb'\n end \n \n end",
"def index\n @users = User.only_students\n @current_term = Term.where(:is_active => true)\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @users }\n end\n end",
"def index\n if current_company.users.include?(current_user)\n @employees = current_company.employees\n else\n redirect_to root_path\n end\n end",
"def index\n\n # Grab user's diets as saved in profile and pass it into the search\n user_diets = current_user.compile_diets\n\n if params[:search_by_diet].present?\n allowed_diets_in_search = (user_diets << params[:search_by_diet]).flatten!\n else\n allowed_diets_in_search = user_diets\n end\n\n # Grab user's allergies as saved in profile and pass it into the search\n user_allergies = current_user.compile_allergies\n\n if params[:search_by_allergy].present?\n allowed_allergies_in_search = (user_allergies << params[:search_by_allergy]).flatten!\n else\n allowed_allergies_in_search = user_allergies\n end\n\n user_excluded_ingredients = current_user.compile_excluded_ingredients\n\n @results = Yummly.search(\n params[:search_by_all],\n \"maxTotalTimeInSeconds\" => params[:search_by_time],\n \"allowedCourse[]\" => params[:search_by_course],\n \"allowedAllergy[]\" => allowed_allergies_in_search,\n \"allowedDiet[]\" => allowed_diets_in_search,\n \"excludedIngredient[]\" => user_excluded_ingredients,\n maxResult: 50);\n \n\n end",
"def index\n @user_personal_informations = User::PersonalInformation.all\n end",
"def show\n study_id = params[:study_id]\n creator_id = params[:creator_id]\n @study = Study.find(:first, :conditions=>[\"id=?\",study_id], :include=>[:primary_publication, :primary_publication_numbers])\n @user_studies = Study.joins(:primary_publication).find(:all, :conditions=>[\"creator_id = ? and studies.id <> ? and project_id = ?\",creator_id, study_id, @study.project_id], :include=>[:primary_publication, :primary_publication_numbers], :order=>[\"title ASC\"])\n all_user_ids = UserProjectRole.find(:all, :conditions=>[\"project_id=? AND role IN (?)\", @study.project_id, [\"lead\",\"editor\"]], :select=>[\"user_id\"])\n @all_users = User.find(:all, :conditions=>[\"id IN (?)\", all_user_ids.collect{|x| x.user_id}], :order=>[\"login ASC\"])\n end",
"def index\n\t @referrals = current_user.referrals.paginate(:page => params[:page])\n end",
"def render_user_selection_page\n @users = User.all\n\n # Remove any group_id param in the url\n saml_url_without_user_id = request.original_url.gsub(/(&user_uid=[^&]*)/,\"\")\n\n # Build SAML replay url for each user\n @replay = {}\n\n @users.each do |user|\n @replay[user.id] = {}\n @replay[user.id][:url] = \"#{saml_url_without_user_id}&user_uid=#{user.uid}\"\n\n @replay[user.id][:access] = false\n @replay[user.id][:access_count] = 0\n user.groups.each do |group|\n if (group.app == current_app)\n @replay[user.id][:access] =true\n @replay[user.id][:access_count] += 1\n end\n end\n end\n\n render template: \"shared/auth/select_user_to_login\", layout: 'application'\n end",
"def index\n # if the user is searching\n if params[:q]\n @people = Person.q(params[:q])\n # if the user has a school\n elsif current_user.school\n @people = current_user.school.people.order('dream_team DESC', :first_name, :last_name)\n else\n @people = current_user.created_people.order('dream_team DESC', :first_name, :last_name)\n end\n end",
"def index\n @doctors = policy_scope(User)\n\n if params[:specialty_or_field].present?\n search_by_specialty_or_field(params[:specialty_or_field])\n elsif params[:specialty].present? || params[:field].present? || params[:name].present? || params[:location].present?\n @doctors = search_doctor_by_specialty(params[:specialty]) if params[:specialty].present?\n @doctors = search_doctor_by_field(params[:field]) if params[:field].present?\n @doctors = search_doctor_by_name(params[:name]) if params[:name].present?\n @doctors = search_doctor_by_location if params[:location].present?\n else\n @doctors\n end\n\n @markers = get_info_for_map_markers(@doctors) if @doctors != []\n end",
"def suggest_user\n skope = User.scoped\n skope = skope.where(\"username LIKE ?\", \"%#{params[:term]}%\")\n @team.members.all.each do |member|\n skope = skope.where(User.arel_table[:id].not_eq(member.user.id))\n end\n\n skope = skope.limit(10)\n\n respond_with(skope.all.map{|x| {:label => x.username, :value => x.id}})\n end",
"def index\n\n if (!params[:expertise].nil?)\n @expertise=params[:expertise]\n people=Person.tagged_with(@expertise, :on=>:expertise)\n @people = people.paginate(:page=>params[:page] ? params[:page] : 1, :per_page=>default_items_per_page)\n else\n people = Person.all(:order=> \"last_name, first_name\")\n @people = people.paginate(:page=>params[:page] ? params[:page] : 1, :per_page=>default_items_per_page)\n end\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @people.to_xml}\n end\n end",
"def show\r\n @principals = User.where(:user_type => \"principal\")\r\n @teachers = User.where(:user_type => \"teacher\")\r\n @activePrincipals = SchoolUser.all\r\n @activeTeachers = SchoolUser.all\r\n\r\n end",
"def index\n\n calculate_ranges \n\n if params[:type] == \"staff\" # searching for teacher\n\n @users = User.where(id: params[:id])\n # @reports = @users.map(&:reports)\n @reports = @users.map(&:reports).flatten.collect{|a| @find_range.include?(a.academic_session_id) ? a : nil}.compact.sort_by{|a| [a.submitted_at.blank? ? a.created_at : a.submitted_at , a.created_at] }.reverse\n\n @results = [['User', @users], ['Report', @reports]]\n \n @show_view = \"user\"\n\n \n elsif params[:type] == \"student\" # searching for student\n\n @students = Student.where(id: params[:id])\n \n @reports = @students.map(&:reports).flatten.collect{|a| @find_range.include?(a.academic_session_id) ? a : nil}.compact.sort_by{|a| [a.submitted_at.blank? ? a.created_at : a.submitted_at, a.created_at] }.reverse\n\n @results = [['Student', @students], ['Report', @reports]]\n\n @show_view = \"student\"\n\n else # Universal Search\n\n query = params[:q]\n\n if query.index(\" \")\n lim = query.index(\" \")\n term1 = query[0..lim]\n term2 = query[lim+1..-1]\n else\n term1 = query\n term2 = query\n end\n\n @users = User.order(:fname).where(\"fname LIKE ? OR lname LIKE ?\", \"%#{term1}%\", \"%#{term2}%\")\n @students = Student.order(:fname).where(\"fname LIKE ? OR lname LIKE ?\", \"%#{term1}%\", \"%#{term2}%\")\n\n @stud_reports = @students.map(&:reports).flatten.collect{|a| @find_range.include?(a.academic_session_id) ? a : nil}.compact\n @user_reports = @users.map(&:reports).flatten.collect{|a| @find_range.include?(a.academic_session_id) ? a : nil}.compact\n\n @reports = (@stud_reports + @user_reports).uniq.sort_by{|a| [a.submitted_at.blank? ? a.created_at : a.submitted_at , a.created_at] }.reverse\n\n @results = [['Student', @students], ['User', @users], ['Report', @reports]]\n\n @show_view = \"univ\"\n\n end\n end",
"def divisions_users_search\n render 'activities/divisions/division_users_search'\n end",
"def index\n @collaborators = User.includes(:github_account).\n where.not(id: params[:ineligible_user_ids]).\n limit(20)\n\n if params[:q]\n @collaborators = @collaborators.search(params[:q])\n end\n\n respond_to do |format|\n format.json\n end\n end",
"def search\n\n @accounts = []\n \n PgSearch.multisearch(params['search-string']).where(:searchable_type => \"Account\").find_each do |document|\n @accounts << document.searchable if Account.user_readable(current_user).include?(document.searchable)\n end\n\n respond_to do |format|\n format.html { render action: \"index\" }\n format.json { render json: @account.errors, status: :unprocessable_entity }\n end\n \n end",
"def index\n @organizations = Organization.all.alphabetical\n @participant = Participant.new\n @users = User.all.map{|user| user} #Individual.all.map{|indiv| indiv.user.id}\n @names_from_users = []\n @users.each do |user|\n # if user does not have an individual, then just use email\n if user.individual.nil?\n @names_from_users << [user.email, user.id]\n else\n @names_from_users << [user.individual.name, user.id]\n end\n end\n @names_from_users.sort!\n end",
"def index\n @user = User.find(@current_user.id)\n @schools = School.all\n if @current_user.usertype == 2\n @school = School.find(SchoolUser.find_by!(:user_id => @current_user.id))\n elsif @current_user.usertype == 3\n @student = Student.find_by!(:user_id => @current_user.id)\n elsif @current_user.usertype == 4\n @ngo = Ngo.find(NgoUser.find_by!(:user_id => @current_user.id))\n end\n @education_levels = EducationLevel.all\n end",
"def index\n @professors = Professor.all if current_user.user_type == 1\n end",
"def index\n # @info_practices = InfoPractice.page(params[:page])\n\n @q = InfoPractice.ransack(params[:q])\n @info_practices = @q.result(distinct: true).page(params[:page])\n authorize @info_practices\n end",
"def searchauthor\n end",
"def index\n @people_1 = Hr::Job.category_job()\n @people_9 = Hr::Lookups::MyselfStatus.select_y\n params[:q] = params[:q] ? params[:q] : {}\n @search = Hr::Person.ransack(params[:q])\n @hr_people = @search.result.paginate(:page => params[:page])\n end",
"def search_new\n\n @searching = true\n results = Profile.joins(:user).includes(:user => [:photos]).user_readable(current_user).search_for(params['search-string'])\n @profiles = results.page(params[:page]).per(@@results_per_page)\n\n respond_to do |format|\n format.html { render action: \"index\" }\n format.json { render json: @project.errors, status: :unprocessable_entity }\n end\n\n end",
"def index\n @institutes = Institute.all\n authorize Institute\n end",
"def index\n\t @contentPageHeading = 'My Companies'\n\t @companies = current_user.companies\n\t @user_companies = UserCompany.where(:user_id => current_user.id)\n\t # @public_companies = Array.new\n\t # for user_company in @user_companies\n\t # \t@public_companies.push(user_company,Company.find(user_company.pc_id))\n\t # end\n\t\t# @public_companies = Company.find(:all, :order => 'name', :conditions => 'id IN (SELECT pc_id FROM user_companies WHERE user_id = '+current_user.id.to_s+')')\n end",
"def index\n candidate = UspsInPersonProofing::Applicant.new(\n address: search_params['street_address'],\n city: search_params['city'], state: search_params['state'],\n zip_code: search_params['zip_code']\n )\n response = proofer.request_facilities(candidate)\n if response.length > 0\n analytics.idv_in_person_locations_searched(\n success: true,\n result_total: response.length,\n )\n else\n analytics.idv_in_person_locations_searched(\n success: false, errors: 'No USPS locations found',\n )\n end\n render json: response.to_json\n end"
] | [
"0.7037881",
"0.6689106",
"0.66761935",
"0.6341753",
"0.629902",
"0.6236115",
"0.62332314",
"0.61650485",
"0.612081",
"0.61120796",
"0.60854584",
"0.60741794",
"0.60646766",
"0.603476",
"0.6031838",
"0.60244644",
"0.5999334",
"0.59984946",
"0.5975857",
"0.59227735",
"0.59188056",
"0.59177583",
"0.5910675",
"0.58859426",
"0.58768433",
"0.58733624",
"0.5868427",
"0.58595806",
"0.5858003",
"0.5851738",
"0.583489",
"0.5830575",
"0.5813945",
"0.58063704",
"0.5798329",
"0.5765229",
"0.57605636",
"0.57593364",
"0.5758984",
"0.575898",
"0.57587117",
"0.5750913",
"0.5749486",
"0.57433647",
"0.57429045",
"0.5740868",
"0.57375866",
"0.573085",
"0.5724256",
"0.5723312",
"0.57107055",
"0.5710657",
"0.57046574",
"0.56933236",
"0.56866896",
"0.5682362",
"0.56799525",
"0.56786233",
"0.5676488",
"0.5674766",
"0.5671634",
"0.5671445",
"0.5669296",
"0.5665148",
"0.5663867",
"0.5656835",
"0.5655432",
"0.5654737",
"0.5652498",
"0.56492156",
"0.56441736",
"0.5641755",
"0.5632394",
"0.5631646",
"0.562702",
"0.56223106",
"0.5622163",
"0.5621877",
"0.562172",
"0.56208277",
"0.5617028",
"0.5609103",
"0.56083214",
"0.5608267",
"0.56041366",
"0.5600232",
"0.55997616",
"0.55984014",
"0.5597574",
"0.55958426",
"0.5594651",
"0.55921733",
"0.5590423",
"0.55900854",
"0.5588348",
"0.55877507",
"0.5585457",
"0.5583801",
"0.55832154",
"0.5583215"
] | 0.66935796 | 1 |
passes all Users grant_individual page | def grants_individual
@users = Array.new()
User.all.each do |u|
if !u.admin?
@users.push(u)
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def index\n @users = User.all\n @page_title = 'Управление топлива и транспорта'\n @authorized_user = User.find(session[:user_id])\n\n respond_to do |format|\n format.html {redirect_to(controller: 'users', action: 'home') if @authorized_user.grants & 256 == 0}\n format.json { render json: @users }\n end\n end",
"def index\n registered_user_information = UserInformation.find_by(user_id: \"#{@user.id}\")\n if !@user.is_elevated?\n if registered_user_information.nil?\n redirect_to new_user_information_url\n else\n redirect_to user_information_url registered_user_information.id\n end\n else\n @user_informations = UserInformation.all.order(updated_at: :desc).page(params[:page])\n authorize @user_informations\n respond_with(@user_informations)\n end\n end",
"def user_access_control_all\n @user = User.find(params[:user_id])\n\n unless !@user.admin? && current_user.admin? || current_user?(@user)\n response_access_denied\n end\n\n rescue\n response_access_denied\n end",
"def index\n # @users = User.all\n # authorize @users \n @users = policy_scope(User)\n authorize @users\n end",
"def match_user\n unless admin?\n user = User.find(current_user)\n\n unless user.id == set_user.id\n redirect_to user_path(current_user), notice: 'You do not have any permission to grant this page !'\n end\n end\n end",
"def admin_grant_permissions\n @user = User.includes(:perms).find(params[:id])\n authorize @user\n user_perms = current_user.perms\n @perms = user_perms & [Perm.grant_permissions, Perm.modify_templates, Perm.modify_guidance, Perm.use_api, Perm.change_org_details]\n end",
"def index\n \n @links_grid = initialize_grid(User)\n authorize! :manage, User\n \n respond_to do |format|\n format.html \n format.json { render json: @user }\n end\n end",
"def index\n\t\t@users = User.paginate(:page => params[:page], :per_page => 5)\n\t\tauthorize @users\n\tend",
"def index\n @entrepreneurs = Entrepreneur.all\n authorize Entrepreneur\n end",
"def index\n @users = User.all\n authorize User\n end",
"def check_access_control_all\n @user = User.find(params[:user_id])\n\n response_access_denied unless current_user.has_role?(:admin) || current_user.id == @user.id\n rescue\n response_access_denied\n end",
"def index\n @grants = policy_scope(Grant.all)\n end",
"def index\n #authorize(User.new)\n @users = User.all #policy_scope(User)\n end",
"def index\n @users = User.all\n authorize @users\n end",
"def grant\n self.status = 'granted'\n save\n end",
"def index\n authorize! :manage, :multiple_users\n\n respond_with(users)\n end",
"def index\n @accounts = current_user.accounts\n @wallet = current_user.wallet\n authorize @accounts\n end",
"def index\n @loan_manager_profiles = LoanManagerProfile.all\n authorize LoanManagerProfile\n end",
"def index\n @users = policy_scope(User).page(params.fetch(:page, 1)).per(params.fetch(:per, 25))\n end",
"def index\n @users = User.where(:activate => true)\n authorize! :update, @users\n end",
"def index\n @permitted_users = PermittedUser.all\n end",
"def authorize_user\n unless current_user.id == @profile.user_id\n flash[:unauthorized] = \"Not authorized\"\n redirect_to listings_path\n end \n end",
"def authorize_for_all_guests\n # Clear all authorizations and create an allow-all entry\n #ContentAuthorization.transaction do\n update_privacy_level(ContentAuthorization::AuthPrivate)\n clear_accessors\n #end\n end",
"def index\n puts \"the current user is#{restrict_access.id}\"\n @users = User.all\n respond_with @users\n end",
"def all_phi_allowed_by\n self.class.__user_id_string(all_phi_context)\n end",
"def render_user_selection_page\n @users = User.all\n\n # Build auth replay url for each user\n @replay = {}\n\n @users.each do |user|\n @replay[user.id] = {}\n @replay[user.id][:url] = \"#{proceed_api_openid_provider_path(user_uid: user.uid)}\"\n\n @replay[user.id][:access] = false\n @replay[user.id][:access_count] = 0\n user.groups.each do |group|\n if (group.app == current_app)\n @replay[user.id][:access] =true\n @replay[user.id][:access_count] += 1\n end\n end\n end\n\n render template: \"shared/auth/select_user_to_login\", layout: 'application'\n end",
"def authorize_user\n redirect_to restrooms_path, flash: {message: \"You don't have permission to make changes to another user's profile.\"} unless current_user.admin? || @user == current_user\n end",
"def authorize_user\n if @user.id != current_user.id\n redirect_to \"/\", notice: 'You are not allowed the given operation' and return\n end\n end",
"def permit\n user = User.find(params[:user_id])\n group = Group.find(params[:id])\n membership = Membership.find_by(user: user, group: group)\n if membership\n membership.update_column(:active, true)\n membership.touch # For showing on dashboard.\n end\n redirect_back fallback_location: '/'\n end",
"def authorise\n # checks can go here (is the application registered for example)\n grant = generate_grant\n valid_grants << grant\n grant\n end",
"def set_and_authorize_grant\n @grant = Grant.find_by(slug: params[:id]) || Grant.find(params[:id])\n authorize(@grant)\n\n @grant\n end",
"def index\n @accountant_profiles = AccountantProfile.all\n\n authorize AccountantProfile\n end",
"def index\n @profiles = Profile.all\n authorize Profile\n end",
"def index\n # authorize! :index, @user, :message => 'Not authorized as an administrator.'\n @users = User.all\n end",
"def grant\n begin\n @account = Account.has_permission(current_user).find(params[:account_id])\n @account.grant_access\n rescue => e\n NotificationMailer.oddity(\"Something with grant access went wrong. Error: #{e}\").deliver\n end\n\n begin\n @user = User.find(@account.account_owner)\n subscription = @account.current_subscription()\n\n if subscription.renewal\n NotificationMailer.membership_renewal_activation(@user).deliver\n else\n NotificationMailer.membership_activation(@user).deliver\n end\n\n rescue => e\n NotificationMailer.oddity(\"Could not send grant access email. Error: #{e}\").deliver\n end\n\n respond_to do |format|\n format.html { redirect_to(awaiting_account_activation_url) }\n format.js\n end\n end",
"def admin_grant_permissions\n user = User.find(params[:id])\n authorize user\n\n # Super admin can grant any Perm, org admins can only grant Perms they\n # themselves have access to\n perms = if current_user.can_super_admin?\n Perm.all\n else\n current_user.perms\n end\n\n render json: {\n 'user' => {\n 'id' => user.id,\n 'html' => render_to_string(partial: 'users/admin_grant_permissions',\n locals: { user: user, perms: perms },\n formats: [:html])\n }\n }.to_json\n end",
"def index\n @authorizedusers = Authorizeduser.all\n end",
"def restrict_users\n \t\tif user_signed_in?\n \t\t\tif current_user.has_role? :client\n \t\t\t\tif current_user.profile.agreed == nil\n \t\t\t\t\tredirect_to edit_profile_path(current_user.profile)\n \t\t\t\tend\n \t\t\tend\n\n \t\tend\n\n \tend",
"def verifica_perfil\n @user = User.find(session[:login])\n\t\n \tbegin\n \t@auth_user = User.find(params[:id])\n \tif @user.perfil != 1\n \t\tif action_name != 'ver'\n \t\t\tredirect_to(bot_path)\n \t\telse\tif @auth_user != @user\n \t\t\tredirect_to(bot_path)\t\n \t\t\tend\t\n \t\tend\t\t\n \tend\n \trescue\n \t\tif @user.perfil != 1\n \t\t\tredirect_to(bot_path)\n \t\tend\n \tend\n end",
"def index\n respond_with current_user\n end",
"def permit_user\n if (!current_user.lunches_admin?) \n flash[:alert] = 'You not allowed to see all orders.'\n respond_to do |format| \n format.html {redirect_to(root_url)}\n end\n end\n end",
"def list_users_for_all_tenants(args = {}) \n get(\"/users.json/global\", args)\nend",
"def list_users_for_all_tenants(args = {}) \n get(\"/users.json/global\", args)\nend",
"def index\n if current_user.present? and current_user.role == \"SO\"\n @assigneds = Assigned.all.order( :grantor, :grantee )\n else\n @assigneds = Assigned.where( grantor: current_user.name ).order( :grantor, :grantee )\n end\n end",
"def index\n @user = User.find(params[:user_id])\n @conditions = @user.conditions\n\n if current_user.id == @user.id\n\t\t\trender action: :index\n\t\telse\n\t\t\trender file: 'public/denied'\n\t\tend\n end",
"def phi_allowed_by\n phi_context[:user_id]\n end",
"def index\n @profiles = Profile.all\n #authorize Profile\n #@profiles = policy_scope(Profile)\n end",
"def denied\n @denied_users = User.where(isDenied:true).where(isApproved:false).paginate(:page => params[:denied_users_page], :per_page => 10)\n end",
"def profile\n if !GraderConfiguration['system.user_setting_enabled']\n redirect_to :controller => 'main', :action => 'list'\n else\n @user = current_user;\n end\n end",
"def awaiting\n @accounts = current_user.agent_profile.get_awaiting_access_accounts\n\n respond_to do |format|\n format.html\n end\n end",
"def user_access_control_account_all\n @account = Account.find(params[:account_id])\n user = @account.user\n\n unless !user.admin? && current_user.admin? || current_user?(user)\n response_access_denied\n end\n\n rescue\n response_access_denied\n end",
"def grant\n id = params[:id]\n @participant = AssignmentParticipant.find(id) unless id.nil?\n @user = User.find(session[:user].id)\n end",
"def authorize\n \t\t#teste http://localhost:3000/products/2/who_bought.atom\n \t\t#ou curl --silent --user dave:secret http://localhost:3000/products/2/who_bought.atom \n \t\treturn if User.count.zero?\n\n \t\tif request.format == Mime[:html]\n \t\t\tuser = User.find_by(id: session[:user_id])\n \t\telse\n \t\t\tuser = authenticate_or_request_with_http_basic do |u,p|\n \t\t\t\tUser.find_by_name(u).try(:authenticate, p)\n \t\t\tend\n \t\tend\n\n \t\tredirect_to login_url, notice: \"Please log in\" unless user\n \t\t\n \tend",
"def index\n authorize Student\n if User.find_by_email(current_user.email).provider == 'facebook' and Student.find_by_email(current_user.email).education == 'DUMMY'\n redirect_to edit_student_path(Student.find_by_email(current_user.email)), notice: 'Please fill out details first !'\n end\n\n @students = Student.all\n # authorize Student\n end",
"def index\n redirect_to profile_path(current_user)\n end",
"def index\n @bookings = policy_scope(current_user.bookings)\n authorize @bookings\n @experiences = policy_scope(current_user.experiences)\n authorize @experiences\n end",
"def index\n User.all #take all auth stuff\n end",
"def index\n \t@users = User.accessible_by(current_ability)\n \tauthorize! :read, User\n end",
"def index\n @relatorio_gerals = RelatorioGeral.all.page(params[:page]).per(15)\n authorize @relatorio_gerals\n end",
"def index\n # set_user, is the only action taken here\n end",
"def index\n # set_user, is the only action taken here\n end",
"def index\n @users = BranchOffice.find(params[:branch_office_id]).users.employee\n authorize @users\n end",
"def check_allowed\n unless $allowed_users.include?(current_user.email)\n flash[:alert] = \"That information is not available.\"\n redirect_to :action => 'welcome', :controller => 'info'\n end\n end",
"def index\n @users = policy_scope(User)\n end",
"def index\n @regdetails = Regdetail.select(:user_id).distinct\n @user_ids = @regdetails.collect { |r| r.user_id }\n @registered_users = User.find(@user_ids).paginate(:page => params[:page], :per_page => 5)\n # @registered_users = @registered_users.paginate(:page => params[:page], :per_page => 4)\n\n end",
"def index\n @users = policy_scope(User)\n .includes(:avatar, :main, :rank)\n .order(created_at: :desc)\n .page(params[:page])\n\n @users = @users.where(hidden: false) unless params[:hidden]\n\n authorize @users\n end",
"def show\n\t\t@user = User.find(params[:id])\n\t\tauthorize @user\n\tend",
"def gold\n authorize! :view, :gold, :message => 'Access limited to subscribers.'\n end",
"def grant(userid=nil, email=nil, download=true, upload=false, view=true, admin=false, delete=false, notifyupload=false, notifydownload=false) #(userid OR email) required\n end",
"def index\n\n #Make sure only logged in admins can manipulate users\n\n if @loggedinuser && @loggedinuser.authorizationlevel >= 4\n else \n redirect_to '/'\n end\n end",
"def show\n @users = User.all\n authorize @colaboradore\n end",
"def listings\n authorize! :read, @user\n end",
"def grant(userid=nil, email=nil, download=true, upload=false, view=true, admin=false, delete=false, notifyupload=false, notifydownload=false) #(userid OR email) required\n end",
"def index\n redirect_to user_path(current_user)\n end",
"def index\n @users = User.all\n authorize @users\n\n render json: @users\n end",
"def index\n if current_user.has_role?(:superadmin)\n @users = User.with_role(:member).\n order(:email).\n search(filter).\n paginate(page: params[:page], per_page: 12)\n else\n redirect_to authenticated_root_url, alert: 'You are not authorized to access this page.'\n end\n end",
"def index\n if !GraderConfiguration['system.user_setting_enabled']\n redirect_to :controller => 'main', :action => 'list'\n else\n @user = User.find(session[:user_id])\n end\n end",
"def index\n @authorized_people = AuthorizedPerson.all\n end",
"def index\n #@event_users = EventUser.where(event: @event)\n #@event_users = EventUserPolicy::Scope.new(current_user, [:admin, EventUser]).resolve\n @event_users = policy_scope(EventUser.where(event: @event))\n authorize @event_users\n\n add_breadcrumb \"#{@event.code}\", :admin_event_path\n add_breadcrumb \"Usuários\", :admin_event_users_path\n end",
"def index\n if current_user.amount.nil?\n redirect_to users_p20_path(current_user.id)\n else\n redirect_to user_path(current_user.id)\n end\n end",
"def index\n @organization_profiles = policy_scope(OrganizationProfile).all\n authorize User\n end",
"def index\n if current_user.role == \"super_admin\" || current_user.role == \"admin\" #[99, 100]\n @users = User.all\n else\n redirect_to user_path(current_user)\n end\n end",
"def run_filters\n set_user\n authorize\n end",
"def allowed_user\n unless current_user.id == Design.find(params[:design_id]).user.id\n flash[:notice] = 'Voce não tem permissão para fazer isso!'\n redirect_to root_url\n end\n end",
"def index\n @users = User.find_all_with_authorization(current_user)\n end",
"def index\n @tutor_groups = TutorGroup.where(academic_session_id: session[:global_academic_session])\n if !@tutor_groups.blank?\n authorize @tutor_groups\n end\n \n end",
"def check_user_access\n check_access_and_redirect(@request)\n end",
"def set_grantee\n @grantee = Grantee.find(params[:id])\n end",
"def index\n authorize! :show, params[:controller]\n @grid = initialize_grid(SysUser, per_page: 4) #wice grid\n end",
"def index\n @cost_setups = CostSetup.all.paginate(page: params[:page], per_page: 15)\n authorize User\n end",
"def declared_user\n if logged_in?\n if Protege.find_by(user_id: current_user.id.to_i).nil?\n redirect_to root_path, :alert => \"Access denied.\"\n end\n else\n redirect_to root_path, :alert => \"Access denied.\"\n end\n end",
"def render_user_selection_page\n @users = User.all\n\n # Remove any group_id param in the url\n saml_url_without_user_id = request.original_url.gsub(/(&user_uid=[^&]*)/,\"\")\n\n # Build SAML replay url for each user\n @replay = {}\n\n @users.each do |user|\n @replay[user.id] = {}\n @replay[user.id][:url] = \"#{saml_url_without_user_id}&user_uid=#{user.uid}\"\n\n @replay[user.id][:access] = false\n @replay[user.id][:access_count] = 0\n user.groups.each do |group|\n if (group.app == current_app)\n @replay[user.id][:access] =true\n @replay[user.id][:access_count] += 1\n end\n end\n end\n\n render template: \"shared/auth/select_user_to_login\", layout: 'application'\n end",
"def authorize_users\n authorize :user\n end",
"def index\n @pagy, @redirects = pagy(policy_scope(Redirect).all)\n authorize @redirects\n end",
"def authorize_for_all\n # Clear all authorizations and create an allow-all entry\n ContentAuthorization.transaction do\n update_privacy_level(ContentAuthorization::AuthPublic)\n clear_accessors\n end\n end",
"def platinum\n authorize! :view, :platinum, :message => 'Access limited to teachers only.'\n end",
"def all_users\n render \"all_users\"\n end",
"def create_non_admin_user_authenticate\n post '/users', 'username' => 'testuser', 'password' => 'testpassword', 'email_address' => 'test@toto.com'\n id_user = last_response.json_body['id']\n digest_authorize 'testuser', 'testpassword'\n id_user\nend",
"def allowedusers_only\n \n\t\tallowed_users=[VendorPortal::Application.config.operationadmin.to_s,VendorPortal::Application.config.operationuser.to_s,VendorPortal::Application.config.vendor.to_s]\n\t\n if !current_user.userrole.in?(allowed_users)\n redirect_to root_path, :alert => \"Access denied.\"\n end\n end",
"def allowedusers_only\n \n\t\tallowed_users=[VendorPortal::Application.config.operationadmin.to_s,VendorPortal::Application.config.operationuser.to_s,VendorPortal::Application.config.vendor.to_s]\n\t\n if !current_user.userrole.in?(allowed_users)\n redirect_to root_path, :alert => \"Access denied.\"\n end\n end"
] | [
"0.6506483",
"0.6187792",
"0.60953796",
"0.60393095",
"0.6032976",
"0.5996649",
"0.599077",
"0.59881985",
"0.595903",
"0.5936245",
"0.59281325",
"0.5910543",
"0.58918375",
"0.58573866",
"0.5854398",
"0.5803139",
"0.5795631",
"0.5794108",
"0.5765949",
"0.5730097",
"0.571552",
"0.5696179",
"0.5687519",
"0.56816584",
"0.567793",
"0.5674211",
"0.56659967",
"0.56648695",
"0.56595135",
"0.56564635",
"0.5654728",
"0.5639218",
"0.56304574",
"0.56183213",
"0.56103027",
"0.5608462",
"0.5600172",
"0.5587488",
"0.55786955",
"0.55727255",
"0.5571239",
"0.5570071",
"0.5570071",
"0.55647904",
"0.55553794",
"0.5543733",
"0.55424505",
"0.5538687",
"0.55229646",
"0.55188113",
"0.55053014",
"0.5501512",
"0.5499607",
"0.54969335",
"0.5492091",
"0.54912066",
"0.5483114",
"0.546873",
"0.5466864",
"0.5464836",
"0.5464836",
"0.54642016",
"0.5462766",
"0.54615384",
"0.54605204",
"0.5453796",
"0.5453143",
"0.5452905",
"0.5445933",
"0.5433002",
"0.54324365",
"0.54322505",
"0.54308146",
"0.5428855",
"0.5421714",
"0.54209185",
"0.5420047",
"0.54184604",
"0.54168135",
"0.54164964",
"0.5413708",
"0.5413153",
"0.5411364",
"0.54093504",
"0.5404537",
"0.5399057",
"0.53989506",
"0.53931993",
"0.5392868",
"0.53919137",
"0.5390793",
"0.5389981",
"0.5389107",
"0.53882843",
"0.5388002",
"0.53832555",
"0.5377197",
"0.53770196",
"0.5373138",
"0.5373138"
] | 0.6908038 | 0 |
change client password for the solus admin | def change_password(username, password)
perform_request({:action => "client-updatepassword", :username => username, :password => password})
statusmsg.match /success/i
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def change_password!(password)\n json = JSON.generate(:changePassword => { :adminPass => password })\n @compute.connection.req('POST', \"/servers/#{@id}/action\", :data => json)\n @adminPass = password\n end",
"def change_password(username, new_password)\n perform_request({:action => \"client-updatepassword\", :username => username, :password => new_password})\n end",
"def change_password\n request_params = {\n host_url_with_protocol: host_url_with_protocol,\n host_url: host_url,\n entity_type: GlobalConstant::TemplateType.change_password_template_type\n }\n service_response = GlobalConstant::StTokenSale.get_client_details(request_params)\n\n # Check if error present or not?\n unless service_response.success?\n render_error_response(service_response)\n return\n end\n\n @presenter_obj = ::Web::Client::Setup.new(service_response, params)\n redirect_to '/token-sale-blocked-region', status: GlobalConstant::ErrorCode.temporary_redirect and return if @presenter_obj.is_blacklisted_ip?(get_ip_to_aml_countries)\n set_page_meta_info(@presenter_obj.custom_meta_tags)\n end",
"def change_password(user, password, options = nil)\n raise Aerospike::Exceptions::Aerospike.new(INVALID_USER) unless @cluster.user && @cluster.user != \"\"\n policy = create_policy(options, AdminPolicy, default_admin_policy)\n\n hash = LoginCommand.hash_password(password)\n command = AdminCommand.new\n\n if user == @cluster.user\n # Change own password.\n command.change_password(@cluster, policy, user, hash)\n else\n # Change other user's password by user admin.\n command.set_password(@cluster, policy, user, hash)\n end\n\n @cluster.change_password(user, hash)\n end",
"def change_password\r\n \r\n end",
"def password=(new_password); end",
"def change_password(opts = {})\n password(opts.merge(:verb => :put))\n end",
"def change_consolepass(vid, new_password)\n perform_request(:action => 'vserver-consolepass', :vserverid => vid, :consolepassword => new_password)\n end",
"def change_password!(opts = {})\n password!(opts.merge(:verb => :put))\n end",
"def set_change_password(user)\n user.update_column(:must_change_passwd, true)\n session[:pwd] = 1\n end",
"def set_password(user, newpassword)\n\t\t\t\t# We are going to do this with an expect script instead of in pure ruby...\n\t\t\t\t# partly because doing it in pure ruby turns out to be pretty tricky since\n\t\t\t\t# ruby doesn't have very good tools for interacting with shell programs, but\n\t\t\t\t# also because Expect does, and we like doing things with the right tool\n\t\t\t\t\n\t\t\t\tCfruby.controller.attempt(\"Changing password for \\\"#{user}\\\"\", 'destructive') {\n\t\t\t\t\t# we must be running as root\n\t\t\t\t\tif(Process.euid() != 0)\n\t\t\t\t\t\traise(ChangePasswordError, \"Passwords can only be set by root\")\n\t\t\t\t\tend\n\t\t\t\t\n\t\t\t\t\t# first check for the existence of expect\n\t\t\t\t\thaveexpect = `/usr/bin/env expect -v`\n\t\t\t\t\tif(haveexpect !~ /expect version/i)\n\t\t\t\t\t\traise(ChangePasswordError, \"Expect binary could not be found\")\n\t\t\t\t\tend\n\t\t\t\t\n\t\t\t\t\t# create a specialized expect script to change the password\n\t\t\t\t\t# and run it\n\t\t\t\t\tchangepass = <<CHANGEPASS\n#!/usr/bin/env expect\n\nspawn passwd #{Cfruby::Exec.shellescape(user)}\nset password \"#{newpassword.gsub(/(\")/, \"\\\\\\1\")}\"\nexpect \"New password:\"\nsend \"$password\\\\r\"\nexpect \"password:\"\nsend \"$password\\\\r\"\nexpect eof\nCHANGEPASS\n\n\t\t\t\t\tscriptfile = Tempfile.new('cfruby')\n\t\t\t\t\tCfruby::FileOps.chmod(scriptfile.path, \"u+x,go-rwx\")\n\t\t\t\t\tscriptfile.print(changepass)\n\t\t\t\t\tscriptfile.close(false)\n\t\t\t\t\t`cp #{scriptfile.path} ./footest`\n\t\t\t\t\toutput = Cfruby::Exec.exec(scriptfile.path)\n\t\t\t\t}\n\t\t\tend",
"def change_password\n # https://github.com/site5/lumberg\n server = Lumberg::Whm::Server.new(host: HOST_NAME, hash: `cat #{HASH_FILE_PATH}`)\n cp_email = Lumberg::Cpanel::Email.new(server: server, api_username: @username)\n @password = SecureRandom.urlsafe_base64(12)\n process_options = { domain: @domain, email: @email, password: @password }\n passwd_result = cp_email.change_password(process_options)\n if passwd_result[:params][:data][0][:reason] == ''\n puts \"Successfully changed password of #{@email}\"\n time = Time.new\n logtime = time.strftime('%Y-%m-%d %H:%M')\n File.open(\"#{LOG_FILE_PATH}\", 'a') { |logfile| logfile.puts \"#{logtime}: #{@email}\" }\n else\n # Print c-panel error message if failed to change the password\n puts \"#{passwd_result[:params][:data][0][:reason]}\"\n end\n end",
"def passwd\r\n @password = \"12345\"\r\n end",
"def set_password(user, password)\n\t\t\t\t`echo \"#{shellescape(password)}\" | /usr/sbin/pw usermod #{shellescape(user)} -h 0`\n\t\t\tend",
"def password=(value)\n conf['api']['password'] = value\n end",
"def edit_password; end",
"def change_temp_password\n\tend",
"def change_rootpassword(vid, new_password)\n perform_request(:action => 'vserver-rootpassword', :vserverid => vid, :rootpassword => new_password)\n end",
"def change_password(new_password)\n update_password(new_password)\n @password = new_password\n puts 'Success! Your password has been changed.'.colorize(:light_green)\n @prompt.keypress('Press any key to continue..')\n menu = Menu.new(self)\n menu.account_details\n end",
"def reset_password\n set :password, Proc.new {\n Capistrano::CLI.password_prompt(\"Password (for user: #{user}): \")\n } \n end",
"def newPassword(newPass)\n\t\tDATABASE.edit(\"users\", \"password\", newPass, \"username\", @username)\n\tend",
"def change_password(new_pass)\n self.salt = User.gen_salt\n salted_pass = User.salt_password(new_pass, self.salt)\n self.password = salted_pass\n end",
"def change_password(old_password, new_password)\n old_password_tb.type_text(old_password)\n new_password_tb.type_text(new_password)\n password_confirm_tb.type_text(new_password)\n change_pw_btn.click\n end",
"def change_customer_password\n enc_password = Authentication::Encryptor.digest([password, ::GlobalConstant::CUSTOMER_PEPPER].join)\n account = Authentication::Account.find_by(email: username)\n account.customer_password = enc_password\n account.save!\n {'success' => true}\n end",
"def update_password\n @admin.password = Admin.get_encrypted_password(@new_password, @login_salt_d)\n @admin.save!\n end",
"def set_admin_user(user, password)\n @user = user\n @password = password\n end",
"def update_with_password(params, *options); end",
"def change_rootpassword(vid, new_password)\n perform_request(action: 'vserver-rootpassword', vserverid: vid, rootpassword: new_password)\n end",
"def update_password\n\t\t@admin = current_admin\n\t\tif(@admin.update_attributes(params[:admin]))\n\t\t\tredirect_to root_path\n\t\telse\n\t\t\trender :action => \"reset\"\n\t\tend\n\tend",
"def change_password(new_password)\n put(\"\", {:password => new_password})\n @password = new_password\n end",
"def change_password(target_address, old_password, new_password)\n uri = URI.parse(\"#{target_address}/user/neo4j/password\")\n response = Net::HTTP.post_form(uri, 'password' => old_password, 'new_password' => new_password)\n JSON.parse(response.body)\n end",
"def change_password(username, old_pw, new_pwd)\n self.user.change_password(username, old_pw, new_pwd)\n end",
"def test_change_password\n response=@root_key_api.change_password(\"test-rb@precog.com\",\"password\",@account_id, \"xyzzy\")\n response=@root_key_api.change_password(\"test-rb@precog.com\",\"xyzzy\",@account_id, \"password\")\n end",
"def set_password(pass)\n self.user_password = User.crypt(pass)\n end",
"def service_password( password )\n\t\t\tself.password_digest = Digest::SHA2.hexdigest( password )\n\t\t\tDRbService.log.debug \"Setting encrypted password for %p to \"\n\t\tend",
"def old_password=(string)\n end",
"def change_password\n set_breadcrumbs(\"change_password\") \n if request.post? || request.patch? \n admin = Admin.find(current_admin.id)\n @check = params[:admin][:password] == params[:admin][:password_confirmation] && params[:admin][:password].present? && params[:admin][:password_confirmation].present?\n if admin.present? && admin.valid_password?(params[:admin][:old_password])\n if @check \n if admin.update_attribute(:password, params[:admin][:password])\n sign_in admin, :bypass => true\n flash[:notice] = I18n.t('change_password.update.success')\n redirect_to admin_root_path\n else\n flash[:error] = I18n.t('common.error') \n end\n else\n flash[:error] = I18n.t('change_password.failure.password_is_not_match')\n end\n else\n flash[:error] = I18n.t('change_password.failure.invalid_old_password')\n end\n end\n end",
"def change_password(username,password,new_password,quick=true)\n if (!username.kind_of?(String))\n raise ArgumentError, \"Expected String for username, but #{username.class} provided.\"\n end\n\n if (!password.kind_of?(String))\n raise ArgumentError, \"Expected String for password, but #{password.class} provided.\"\n end\n\n if (!new_password.kind_of?(String))\n raise ArgumentError, \"Expected String for new_password, but #{new_password.class} provided.\"\n end\n\n # open a socket to the server\n socket = open_socket()\n\n # make start packet\n header = TacacsPlus::TacacsHeader.new\n if (@session_id)\n header.session_id = @session_id\n else\n header.randomize_session_id!\n end\n body = TacacsPlus::AuthenticationStart.new\n body.action_chpass!\n body.authen_type_ascii!\n body.priv_lvl = 1\n body.user = username if (quick)\n\n session = ClientSession.new()\n session.request = PacketStruct.new(header,body)\n session.type = :authentication\n session.getuser = username\n session.getpass = new_password\n session.getdata = password\n\n # process server dialog\n attempt = process_response(session, socket)\n\n return(attempt)\n end",
"def pw_admin\n @user = User.find(params[:id])\n new_password = Devise.friendly_token(50)\n user.reset_password(new_password, new_password)\n user.send_rest_password_instructions\n end",
"def change_password\n @user = current_user\n end",
"def change_password\n @user = current_user\n end",
"def change_enable_password(username,password,new_password,quick=true)\n if (!username.kind_of?(String))\n raise ArgumentError, \"Expected String for username, but #{username.class} provided.\"\n end\n\n if (!password.kind_of?(String))\n raise ArgumentError, \"Expected String for password, but #{password.class} provided.\"\n end\n\n if (!new_password.kind_of?(String))\n raise ArgumentError, \"Expected String for new_password, but #{new_password.class} provided.\"\n end\n\n # open a socket to the server\n socket = open_socket()\n\n # make start packet\n header = TacacsPlus::TacacsHeader.new\n if (@session_id)\n header.session_id = @session_id\n else\n header.randomize_session_id!\n end\n body = TacacsPlus::AuthenticationStart.new\n body.action_chpass!\n body.authen_type_ascii!\n body.service_enable!\n body.priv_lvl = 1\n body.user = username if (quick)\n\n session = ClientSession.new()\n session.request = PacketStruct.new(header,body)\n session.type = :authentication\n session.getuser = username\n session.getpass = new_password\n session.getdata = password\n\n # process server dialog\n attempt = process_response(session, socket)\n\n return(attempt)\n end",
"def change_password\n #check if user is new or being updated\n if self.encrypted_password.present?\n #verifies password\n if self.password_check\n self.encrypt_password\n else\n raise \"error\"\n end\n else\n raise \"error\"\n end\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end",
"def edit_change_password\n @user = current_user\n end",
"def set_password; nil; end",
"def update_password(newpwd)\n self.password = Utils.sha1(newpwd + 'ad2012spot' + email)\n end",
"def change_password\n\t\t# if admin, allow editing user's password\n if current_user.is_admin?\n @user = User.find(params[:id])\n else\n @user = current_user\n end\n end",
"def password=(value)\n reset_agent\n @password = value\n end",
"def password=(value)\n reset_agent\n @password = value\n end",
"def password=(value)\n reset_agent\n @password = value\n end",
"def change_password(old_password, new_password)\n @call_params[:old_password] = old_password\n @call_params[:new_password] = new_password\n @client.call(self.class, __callee__.to_s, @call_params)\n end",
"def password=(password)\n raise ArgumentError, \"The new password cannot be nil\" unless password\n raise ArgumentError, \"The new password cannot be empty\" if password.empty?\n\n self.service.editObject({ \"password\" => password.to_s })\n self.refresh_details()\n end",
"def password=(password)\n raise ArgumentError, \"The new password cannot be nil\" unless password\n raise ArgumentError, \"The new password cannot be empty\" if password.empty?\n\n self.service.editObject({ \"password\" => password.to_s })\n self.refresh_details()\n end",
"def set_password(v)\n @password = v\n # returns v\n end",
"def update\n respond_to do |format|\n new_password = params[:user_admin][:password] \n password = new_password.present? ? @user_admin.encrypt_password(new_password) : @user_admin.password\n if @user_admin.update(user_admin_params.merge(password: password))\n format.html { redirect_to @user_admin, notice: 'User admin was successfully updated.' }\n format.json { render :show, status: :ok, location: @user_admin }\n else\n format.html { render :edit }\n format.json { render json: @user_admin.errors, status: :unprocessable_entity }\n end\n end\n end",
"def password=(new_password)\n @password = new_password\n end",
"def set_APIPassword(value)\n set_input(\"APIPassword\", value)\n end",
"def password= new_password\n @password = new_password\n end",
"def reset_super_admin_password\n @admin_=current_admin\n if @admin_\n old_password=admin_params[:password]\n new_password=admin_params[:new_password]\n new_password_confirmation=admin_params[:password_confirmation]\n if passwords_are_vlalid(new_password, new_password_confirmation, 'update_super_admin')\n @admin_=Admin.authenticate(@admin_.username, old_password)\n if @admin_\n if @admin_.update_attribute(:new_password, new_password)\n flash[:success]=\"Updated succesffuly \"\n destroy\n else\n flash.now[:error]=\"Error, couldn't update password\"\n @admin=current_admin\n @admin_=current_admin\n render 'update_super_admin'\n end\n else\n\n flash.now[:error]=\"Old password is incorrect \"\n @admin=current_admin\n @admin_=current_admin\n render 'update_super_admin'\n end\n\n else\n @view_super_admin_change_password_form=true\n return\n end\n else\n redirect_to admin_index_path\n end\n end",
"def change_password\n #raise current_user.inspect\n end",
"def password(value)\n @config[:password] = value\n end",
"def change_password(params)\n response = nexus.post(nexus_url(\"service/local/users_changepw\"), :body => create_change_password_json(params), :header => DEFAULT_CONTENT_TYPE_HEADER)\n case response.status\n when 202\n return true\n when 400\n raise InvalidCredentialsException\n else\n raise UnexpectedStatusCodeException.new(response.status)\n end\n end",
"def password_change_new\n\n end",
"def update_current_logged_in_users_password(args = {}) \n put(\"/users.json/current/password\", args)\nend",
"def change_password\n @user=User.find(current_user)\n end",
"def set_Password(value)\n set_input(\"Password\", value)\n end"
] | [
"0.793124",
"0.7350705",
"0.71624875",
"0.69166595",
"0.69084907",
"0.6891743",
"0.6863151",
"0.68233085",
"0.674484",
"0.6740427",
"0.67357725",
"0.6700214",
"0.6661063",
"0.6637813",
"0.66173005",
"0.65869164",
"0.6579392",
"0.65749407",
"0.6556639",
"0.6524318",
"0.6520393",
"0.65074307",
"0.65029997",
"0.6490618",
"0.6478387",
"0.64682305",
"0.64617187",
"0.6427898",
"0.6402628",
"0.63810414",
"0.63799447",
"0.63748103",
"0.6368375",
"0.6345693",
"0.63426924",
"0.63377863",
"0.63341874",
"0.63190615",
"0.6296867",
"0.6281606",
"0.6281606",
"0.62785846",
"0.62774557",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6274337",
"0.6273314",
"0.6273314",
"0.62705135",
"0.62677747",
"0.62643886",
"0.62591904",
"0.6257615",
"0.6257615",
"0.6257615",
"0.6253523",
"0.6230286",
"0.6230286",
"0.621887",
"0.62105143",
"0.62070787",
"0.6200946",
"0.61976665",
"0.6187137",
"0.61854863",
"0.61829734",
"0.61721057",
"0.6163901",
"0.61399204",
"0.6137474",
"0.61301035"
] | 0.7255236 | 2 |
checks whether a specific client exists | def exists?(username)
perform_request({:action => 'client-checkexists', :username => username})
statusmsg.match /client exists/i
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def exists?(username)\n perform_request({:action => 'client-checkexists', :username => username})\n end",
"def client_exists(client_id, &callback)\n callback.call(@connected.containsKey(client_id))\n end",
"def client?\n !!@client\n end",
"def client?\n !!@client\n end",
"def registered?(item)\n @clients.has_key?(item) or @clients.has_value?(item)\n end",
"def known_client?(client_public_key)\n encoded_key = z85_encode(client_public_key)\n if @known_clients.empty?\n store_known_client(encoded_key)\n true\n else\n @known_clients.include?(encoded_key)\n end\n end",
"def is_client?\n client ? true : false\n end",
"def exist?(key)\n with_client do |client|\n !client.exists(build_key(key)).zero?\n end\n end",
"def client?\n roles.count == 1 && has_role?('client')\n end",
"def exist?(name, client = Client.new)\n client.droplets.all.any? { |x| x.name == name }\n end",
"def exists\n @client.call(self.class, __callee__.to_s, @call_params)\n end",
"def exists\n @client.call(self.class, __callee__.to_s, @call_params)\n end",
"def exists\n @client.call(self.class, __callee__.to_s, @call_params)\n end",
"def exists\n @client.call(self.class, __callee__.to_s, @call_params)\n end",
"def subclients?\n subclients.any?\n end",
"def client?\n role == 'client'\n end",
"def check_client\n unless client\n raise ClientNotSetup\n end\n unless client.connected?\n if raise_on_error\n raise ClientNotConnected\n else\n @logger.error 'Client not connected! Check internet connection'\n return false\n end\n end\n true\n end",
"def reachable?\n !!client\n end",
"def reachable?\n !!client\n end",
"def client?() false; end",
"def client?\n return @mode == :client\n end",
"def client?\r\n @connect_type == :client\r\n end",
"def client?\n return (server == false)\n end",
"def check_clientname_present\n if Tenant.find_by(:name =>(params[:clientname_id])).present?\n msg = \"Client Name already Exists\"\n else\n msg = \"\"\n end\n render json: {\"message\"=> msg}\n end",
"def fetch_and_validate_client\n @uuid = \"#{GlobalConstant::Client.sandbox_prefix_uuid}#{@uuid}\"\n @client = Client.where(uuid: @uuid).first\n\n return error_with_identifier(\"invalid_client_id\", \"am_cc_gpd_favc_1\") if @client.blank? ||\n @client.status != GlobalConstant::Client.active_status\n\n success\n end",
"def check_ai_client_doesnt_exist(client_name,client_mac,service_name)\n client_mac = client_mac.upcase\n message = \"Checking:\\tClient \"+client_name+\" doesn't exist\"\n command = \"installadm list -p |grep '#{client_mac}'\"\n output = execute_command(message,command)\n if output.match(/#{client_name}/)\n puts \"Warning:\\tProfile already exists for \"+client_name\n if $yes_to_all == 1\n puts \"Deleting:\\rtClient \"+client_name\n unconfigure_ai_client(client_name,client_mac,service_name)\n else\n exit\n end\n end\n return\nend",
"def chef_client?(env)\n provisioners(:chef_client, env).any?\n end",
"def verify_client_belongs_to_user\n return if @resource_for_web_app.is_admin?\n not_found(\"client does not belong to user\") if @resource_for_web_app.id.to_s != params[:id]\n end",
"def event_exists?(client)\n api = @settings['api'] || {}\n path = \"/events/#{client}\"\n request = Net::HTTP::Get.new(path)\n if api['user']\n request.basic_auth(api['user'], api['password'])\n end\n Net::HTTP.new(api['host'] || '127.0.0.1', api['port'] || 4567).start do |http|\n response = http.request(request)\n response.body\n end\n end",
"def valid_client?(request)\n (request.env[\"HTTP_X_FORWARDED_FOR\"] == \"127.0.0.1\") ||\n (request.env[\"HTTP_X_CLIENT_IDENTIFIER\"] == \"<< Client Identifier Here >>\") &&\n (request.env[\"HTTP_X_CLIENT_SECRET_KEY\"] == \"<< Secret Token Here >>\")\n end",
"def fetch_and_validate_client\n sandbox_uuid = \"#{GlobalConstant::Client.sandbox_prefix_uuid}#{@uuid}\"\n @client = Client.where(uuid: sandbox_uuid).first\n\n return error_with_identifier(\"invalid_client_id\", \"am_cc_gpd_favc_1\") if @client.blank? ||\n @client.status != GlobalConstant::Client.active_status\n\n @client_id = @client.id\n success\n end",
"def chef_client?\n chef_client.present?\n end",
"def exists_Client (nome)\n count= @dbh.select_one(\"SELECT count(*) FROM client WHERE Name=?;\",nome)\n return count[0]\n rescue DBI::DatabaseError => e\n puts \"An error occurred in exists_Client\"\n puts \"Error code: #{e.err}\"\n end",
"def registered? c, verbose = false\n if @clients[c][:nick].empty?\n if verbose then\n\tsend_errnr_to_client c, NR::ERR_NOTREGISTERED\n end\n \n return false\n elsif @clients[c][:user] != 1\n if verbose then\n\tsend_errnr_to_client c, NR::ERR_NOTREGISTERED\n end\n \n return false\n else\n return true\n end\n end",
"def fetch_and_validate_client\n @client = Client.get_from_memcache(@client_id)\n\n return error_with_identifier('invalid_client_id','sb_2') if\n @client.blank? || @client.status != GlobalConstant::Client.active_status\n\n success\n end",
"def exists_on_server?\n false\n end",
"def empty?\n @clients.empty?\n end",
"def isnew?\n if Client.new\n return false \n else\n return true \n end \n end",
"def has_deaf_client?\n !self.deaf_client.nil?\n end",
"def check_if_sync_client!\n self.class.check_if_sync_client!(client)\n end",
"def does_client_level_crosswalk_exist(reason_code_object)\n crosswalk_records = reason_code_object.reason_codes_clients_facilities_set_names.\n select{|rcsn| rcsn.client_id != nil}\n rcc_log.debug \"Does client level crosswalk exist: #{crosswalk_records.present? && crosswalk_records.length > 0}\"\n crosswalk_records.present? && crosswalk_records.length > 0\n end",
"def from_client?\n @origin == :client\n end",
"def connected_to_dropbox_as_client?\n connect_client if $client.blank?\n end",
"def relevant?(server)\n server.exists?\n end",
"def ensure_chef_client\n step(\" ensuring chef client exists\")\n return @chef_client if chef_client\n step( \" creating chef client\", :green)\n @chef_client = Chef::ApiClient.new\n @chef_client.name(fullname)\n @chef_client.admin(false)\n #\n # ApiClient#create sends extra params that fail -- we'll do it ourselves\n # purposefully *not* catching the 'but it already exists' error: if it\n # didn't show up in the discovery process, we're in an inconsistent state\n response = chef_api_server_as_admin.post_rest(\"clients\", { 'name' => fullname, 'admin' => false, 'private_key' => true })\n client_key.body = response['private_key']\n client_key.save\n @chef_client\n end",
"def validate_client\n if Doorkeeper.config.skip_client_authentication_for_password_grant\n client.present? || (!parameters[:client_id] && credentials.blank?)\n else\n client.present?\n end\n end",
"def include?(conn)\n return @uuids.include? conn.uuid\n end",
"def subscribed?(u)\n @clients.has_key?(u.signature)\n end",
"def exists?\n username && api_key\n end",
"def validate_client\n\n @client = CacheManagement::Client.new([@client_id]).fetch[@client_id]\n\n return validation_error(\n 'e_sam_4',\n 'invalid_api_params',\n ['invalid_client_id'],\n GlobalConstant::ErrorAction.default\n ) if @client.blank? || @client[:status] != GlobalConstant::Client.active_status\n\n r = fetch_client_eth_address\n return r unless r.success?\n\n success\n\n end",
"def chef_clients_running?(host_or_ip)\n ssh_cmd = ssh_cmd_base(host_or_ip)\n ssh_cmd << \"'if test -f /var/chef/cache/chef-client-running.pid; then flock -n /var/chef/cache/chef-client-running.pid true; fi'\"\n ssh_cmd = ssh_cmd.map(&:chomp).join(\" \")\n res = `#{ssh_cmd}`\n case $?.exitstatus\n when 0\n return false\n else\n return true\n end\n end",
"def connection_exists?(name)\n @connections.include?(name)\n end",
"def exist?(*args)\n get(*args)\n true\n rescue Veil::GroupNotFound, Veil::CredentialNotFound\n false\n end",
"def fetch_and_validate_client(client_id, err_prefix = 'u_eh_c')\n return client_not_found_response(\"#{err_prefix}:l_u_eh_fvc_1\") if client_id.blank?\n client = CacheManagement::Client.new([client_id]).fetch[client_id]\n return client_not_found_response(\"#{err_prefix}:l_u_eh_fvc_2\") if client.blank?\n success_with_data(client)\n end",
"def client(name)\n client_to_find = name\n @clients.find {|key, value| key == client_to_find } \n end",
"def exists?\n begin\n url_for(:resources_resource, credentials, id).head\n true\n rescue RestClient::Forbidden\n true\n rescue RestClient::ResourceNotFound\n false\n end\n end",
"def exists?\n begin \n CouchSpring.get uri\n true\n rescue\n false\n end \n end",
"def google_check_exist(client)\n path = CGI::unescape(@fields[:path])\n client.check_exist_by_path(path)\n end",
"def exists?(user_id)\n connect_cassandra\n rows = @client.execute(\"SELECT user_id FROM users WHERE user_id = '#{user_id}'\")\n !rows.empty?\nend",
"def exists?\n\t\t# Look for cluster\n\t\t@cluster_id = DSMAPIServer.find_server(@resource[:name], @resource[:storagecenter])\n\t\tif @cluster_id == nil\n\t\t\treturn false\n\t\telse\n\t\t\treturn true\n\t\tend\n\tend",
"def exists?\n begin \n CouchDB.get uri\n true\n rescue\n false\n end \n end",
"def validate_client_details\n return error_with_data(\n 'um_gbd_1',\n 'Client is not active',\n 'Client is not active',\n GlobalConstant::ErrorAction.default,\n {}\n ) if !@client.is_web_host_setup_done?\n\n success\n end",
"def exists?\n users(should_user, should_vhost)\n end",
"def validate_client_details\n\n return error_with_data(\n 'um_srpl_1',\n 'Client is not active',\n 'Client is not active',\n GlobalConstant::ErrorAction.default,\n {}\n ) if !@client.is_web_host_setup_done?\n\n success\n end",
"def find_client(cl_name)\n chef_clients.find{|ccl| ccl.name == cl_name }\n end",
"def create_client(client)\n resp = post CLIENT_API_PATH, params: client.as_json\n process_response(resp)\n client.id = extract_id_from_location_path(resp.headers['Location'])\n true\n rescue Errors::UnprocessableEntity\n false\n end",
"def native_client?(type)\n if @native_clients[type]\n return @native_clients[type]\n end\n @native_clients[type] = create_native_client(type, @connection_opts)\n end",
"def registerClient(client, svcname)\n match = @svcList.select{|s| s.name == svcname}\n if match == nil || match[0] == nil\n $Logger.error \"Unable to find service #{svcname}\"\n return\n end\n svc = match[0]\n\n # Only add it if its not the same oneenvd re-registerign\n match = svc.clientList.select{|c| c.name == client.name && c.zone == client.zone && c.port == client.port && c.host == client.host}\n if match == nil || match[0] == nil\n svc.clientList.push(client)\n end\n end",
"def registered?\n registrations = self.ring_server.read_all([:name,\n @service,\n nil,\n @identifier])\n registrations.any? { |registration| registration[2] == self }\n rescue DRb::DRbConnError\n @ring_server = nil\n false\n end",
"def drc_defined?\n defined?(DRCClient) && !DRCClient.config.nil?\n end",
"def activeConnections?\n if @connectedClients then do return true else return false end\n end",
"def authed?\n client.authed?\n end",
"def read?(person, client)\n if (! self.priv)\n return self.client == client\n end\n return (self.client == nil || self.client == client) &&\n (owner == person || owner.contacts.include?(person) || (!person.nil? && person.moderator?(client)))\n end",
"def connected?\n if @ticket && @soap_client\n return true\n end\n return false\n end",
"def validate_client_and_admin\n\n r = fetch_and_validate_client\n return r unless r.success?\n\n r = fetch_and_validate_admin\n return r unless r.success?\n\n success\n end",
"def client_active?\n @automation_client\n end",
"def registered?\n registrations = ring_server.read_all [:name, @service, nil, @identifier]\n registrations.any? { |registration| registration[2] == @object }\n rescue DRb::DRbConnError\n @ring_server = nil\n return false\n end",
"def resource_exists?\n reload!\n @exists = true\n rescue Google::Cloud::NotFoundError\n @exists = false\n end",
"def exist?\n request(:get)\n true\n rescue Stretcher::RequestError::NotFound\n false\n end",
"def client(name)\n @clients[name]\n end",
"def validate_client_and_admin\n r = fetch_and_validate_client\n return r unless r.success?\n\n r = fetch_and_validate_admin\n return r unless r.success?\n\n success\n end",
"def validate_client_and_admin\n r = fetch_and_validate_client\n return r unless r.success?\n\n r = fetch_and_validate_admin\n return r unless r.success?\n\n success\n end",
"def has_host?(wspace,addr)\n\t\twspace.hosts.find_by_address(addr)\n\tend",
"def service_with_id_exists?(app, id)\n app = getTableValue(app)\n id = getTableValue(id)\n json = CC.CLI.get_json(\"%{serviced} service list -v\")\n json.each do |service|\n return true if service[\"Name\"] == app && service[\"DeploymentID\"] == id\n end\n return false\n end",
"def is_one_available?\n begin\n client = OpenNebula::Client.new(@@one[:credentials], @@one[:endpoint])\n rescue Exception => e\n $LOG.error \"Unable to connect to ONE with message: #{e.message}\"\n return false\n end\n\n version = client.get_version\n # Try to get ONE version just to check if it's possible to connect to ONE\n if version.is_a? OpenNebula::Error\n $LOG.error 'Unable to find out ONE version with message: '+version.message\n return false\n end\n $LOG.info 'Connection with ONE verified.'\n\n return true\n end",
"def exist?(service)\n !service_list[service].nil?\n end",
"def has_history?\n Ticket.client(client).any?\n end",
"def path_exist(name, desc, *clients, **options, &block)\n path(name, desc, *clients, options.merge(must_be: :exist),\n &block)\n end",
"def znode_exists?(znode_path, zk_host=\"localhost:2181\")\n require 'rubygems'\n require 'zookeeper'\n znode_found = false\n begin\n @zk = Zookeeper.new(zk_host)\n if !@zk.connected?\n raise \"znode_exists : Unable to connect to zookeeper\"\n end \n r = @zk.get(:path => znode_path)\n if r[:rc] == 0\n znode_found = true\n end \n rescue Exception => e\n puts e.message\n ensure\n @zk.close unless @zk.closed?\n end\n return znode_found\nend",
"def connected?\n begin\n @client.cluster.health\n rescue => e\n raise Xi::ML::Error::CaughtException, \\\n \"ES client not connected: #{e.message}\"\n end\n true\n end",
"def connected_with? nick\n @connection_cache.has_key? nick\n end",
"def instance_exists?(ec2_client, instance_id)\n ec2_client.describe_instances(instance_ids: [instance_id])\n return true\nrescue StandardError\n return false\nend",
"def client_subscribed?(client)\n cursor = parse \"BEGIN msf.acm_utils.manage_tocs(:publication, :cno, :action, :email, :cur_status, :status_dt, :r_str); end;\"\n cursor.bind_param ':cno', client.to_s\n cursor.bind_param ':email', nil, String, 64\n cursor.bind_param ':action', 'STATUS'\n exec cursor do\n cursor[':cur_status'] == 'subscribed'\n end\n end",
"def is_linked?\n\t\t\t@client.linked?\n\t\tend",
"def currentUserExists?\n # Get the current user.\n user = User.get(:current)\n \n # Check it exists.\n return User.exists?(user[\"id\"])\n rescue ActiveResource::UnauthorizedAccess\n @error = \"User unauthorized.\"\n return false\n rescue ActiveResource::ClientError => e\n @error = e.message\n return false\n rescue ActiveResource::ConnectionError => e\n @error = e.message\n return false\n rescue Errno::ECONNREFUSED => e\n @error = \"Unable to connect to #{@site}\"\n return false\n end",
"def find_server!\n @client.clients.each do |client|\n client.force_status! do |status|\n if status == :primary && [:primary, :primary_preferred, :secondary_preferred].include?(@client.read_pref)\n @pending_server = false\n server_found!\n elsif status == :secondary && [:secondary, :primary_preferred, :secondary_preferred].include?(@client.read_pref)\n @pending_server = false\n server_found!\n end\n end\n end\n end",
"def contains_host?(id)\n contains_resource?('HOSTS/ID', id)\n end",
"def exists?(id)\n resource_gateway.show_head(id)\n true\n rescue RestClient::ResourceNotFound\n false\n end",
"def server_exists?(connection, name)\n connection.servers.any? {|s| s.name == name}\n end",
"def exist?\n\t\t\tClient.head @root\n\t\tend"
] | [
"0.7496921",
"0.7363739",
"0.7176204",
"0.7176204",
"0.7113123",
"0.70755416",
"0.70607466",
"0.7052736",
"0.70427513",
"0.6930154",
"0.69246525",
"0.69246525",
"0.69246525",
"0.69246525",
"0.6894699",
"0.68587893",
"0.6844123",
"0.6816736",
"0.6816736",
"0.6800355",
"0.675972",
"0.6752882",
"0.6643669",
"0.66431904",
"0.66231996",
"0.6568467",
"0.65526265",
"0.6493127",
"0.64857775",
"0.6485304",
"0.6474882",
"0.6463278",
"0.64624226",
"0.64354414",
"0.641681",
"0.63764435",
"0.63509345",
"0.63322973",
"0.63238966",
"0.6315469",
"0.63109946",
"0.63109326",
"0.63067293",
"0.63005793",
"0.62931764",
"0.62332195",
"0.62020206",
"0.61959016",
"0.618819",
"0.61866367",
"0.61690205",
"0.61488056",
"0.6141327",
"0.6109251",
"0.6106582",
"0.60985667",
"0.6097367",
"0.60886145",
"0.6087456",
"0.6079197",
"0.6074197",
"0.60509163",
"0.6045057",
"0.60287446",
"0.5996497",
"0.5990443",
"0.59759164",
"0.5966718",
"0.59549993",
"0.5949476",
"0.5936252",
"0.5917428",
"0.5914589",
"0.5909906",
"0.5905972",
"0.59045535",
"0.58966434",
"0.58940053",
"0.5882936",
"0.58730924",
"0.58705384",
"0.58705384",
"0.5858368",
"0.58469915",
"0.58423835",
"0.58377236",
"0.58354807",
"0.5830035",
"0.5829667",
"0.5824763",
"0.5820196",
"0.5817324",
"0.58154374",
"0.5811183",
"0.58064324",
"0.58044696",
"0.57940865",
"0.5791672",
"0.57825553",
"0.5765964"
] | 0.80413896 | 0 |
Verify a client's login. Returns true when the specified login is correct | def authenticate(username, password)
perform_request({:action => 'client-authenticate', :username => username, :password => password})
statusmsg.match /validated/i
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_correct_login\n res = make_login Configuration.USER, Configuration.PASSWORD\n puts \"\\nTester#test_correct_login:\\n#{res}\" if Configuration.VERBOSE\n res and res.length == Constants.TOKEN_LENGTH and res =~ /^[0-9a-f]*$/\n end",
"def credential_match?(user, login, password)\n return false unless user.phone == login || user.email == login\n return false unless user.password == password\n return true\n end",
"def credential_match?(user, login, password)\n return false unless user.phone == login || user.email == login\n return false unless user.password == password\n return true\n end",
"def credential_match?(user, login, password)\n return false unless user.phone == login || user.email == login\n return false unless user.password == password\n return true\n end",
"def credential_match?(user, login, password)\n return false unless user.phone == login || user.email == login\n return false unless user.password == password\n true\n end",
"def credential_match?(user, login, password)\n false unless user.email == login || user.phone == login\n false unless user.password == password\n true\n end",
"def test_login\n if @auth!='' then\n result = do_request(json_obj('user.checkauth',\n {'sessionid'=>@auth}))\n if !result['result'] then\n @auth=''\n return false #auth hash bad\n end\n return true #auth hash good\n else\n return false\n end\n end",
"def login\n VkMusic.log.info(\"Client#{@id}\") { 'Logging in...' }\n login = Request::Login.new\n login.call(agent)\n login.send_form(@login, @password, agent)\n return true if login.success?\n\n VkMusic.log.warn(\"Client#{@id}\") { \"Login failed. Redirected to #{login.response.uri}\" }\n false\n end",
"def login?\n if login\n return true\n else\n return nil\n end\n end",
"def checkLoginMatch(result, loginName, loginPwd)\n\treturn result['password'] == loginPwd\nend",
"def login_check(response)\n\t\t\tif response.code == '200'\n\t\t\t\tputs \"#{response.code} - #{response.message}: Logged in\"\n\t\t\t\tputs \"site: #{api_location}\"\n\t\t\t\tputs \"with: #{username}\"\n\t\t\telse\n\t\t\t\tputs \"#{response.code} - #{response.body}: Failed to log in\"\n\t\t\t\tif @test_login == false\n\t\t\t\t\tabort #if login fails, then abort\n\t\t\t\tend\n\t\t\tend\n\t\tend",
"def valid_login?(login, password)\n if user = user?(login) and auth(user, password)\n return user\n end\n end",
"def validate(client, container_version, game_id, game_version, nick, captcha_code, encrypted_code)\n code = LOGIN_OK\n if nick.empty? || nick.length > NICK_MAX || nick !~ NICK_FORMAT\n code = LOGIN_DUPLICATE_NICK\n elsif !@captcha_factory.correct?(captcha_code, encrypted_code)\n code = LOGIN_WRONG_CAPTCHA\n elsif game_id >= 0\n code = Proxy.instance.check_login(container_version, game_id, game_version)\n end\n\n if code == LOGIN_OK\n return true\n else\n client.result('login', [code, nil], true)\n false\n end\n end",
"def validate_login_user(params)\n params[\"username\"] and params[\"password\"] and params[\"password\"] == params[\"password_2\"]\n end",
"def test_valid_login\n login_user = Login.authenticate(\"gordon\", \"wibble\")\n assert login_user == @login\n end",
"def check_login(username, password)\n hashed_password = Digest::SHA2.hexdigest(password)\n\n @login_database.each_with_index do |data, index|\n return true if (data.include? username + \" || \" + hashed_password)\n end\n return false\nend",
"def correct_credentials?\n correct_username = 'admin'\n correct_password = 'admin'\n input_username = @request.params['username']\n input_password = @request.params['password']\n correct_username == input_username && correct_password == input_password\n end",
"def verify_existing_login\n return if cookies[GlobalConstant::Cookie.user_cookie_name.to_sym].blank?\n\n @response = CompanyApi::Request::Client.new(\n CompanyApi::Response::Formatter::Client,\n request.cookies,\n {\"User-Agent\" => http_user_agent}\n ).fetch_verify_cookie_details\n\n # success means user is already logged in, we would redirect to dashboard / planner\n # Error means user ain't logged in yet.\n return unless @response.success?\n\n @presenter_obj = ::WebPresenter::UserPresenter.new(@response, params)\n\n if @presenter_obj.client_token.step_three_done?\n redirect_to :dashboard, status: GlobalConstant::ErrorCode.temporary_redirect and return\n else\n redirect_to :planner, status: GlobalConstant::ErrorCode.temporary_redirect and return\n end\n\n end",
"def login_is_valid\n username = params[\"uname\"]\n password = params[\"psw\"]\n\n if username == \"admin\" && password == \"password\"\n return true\n else\n return false\n end\nend",
"def login_verification(params_username, params_password) \n db = connect_to_database()\n database_info = db.execute(\"SELECT Username, Password, UserId FROM users WHERE users.Username = ?\", params_username)\n if database_info.length > 0 && BCrypt::Password.new(database_info.first[\"Password\"]) == params_password\n return true \n else\n return false\n end\n end",
"def logs_in?(user, password)\n \n @http = (@use_ssl? ? Net::HTTPS : Net::HTTP).new(@server, (@use_ssl ? 443 : 80))\n \n with_viewstate_and_connection(@http) do | payload |\n login_form_values = {\n \"txtUserName\" => user.to_s,\n \"txtUserPass\" => password.to_s,\n \"cmdLogin\" => \"cmdLogin\",\n \"listSpeed\" => \"Broadband\",\n \"__VIEWSTATE\" => payload,\n }\n \n begin\n @http.start do |http|\n form_post = Net::HTTP::Post.new(LOGIN_URL)\n form_post.set_form_data(login_form_values, '&')\n response = http.request(form_post); response.value\n end\n rescue Net::HTTPRetriableError => e\n if e.message =~ /302/ # RWW will return a redirect if the user is found\n return true\n end\n end\n return false\n end\n end",
"def authenticates_with?(login, pass, encryption_type=nil)\n return false unless self.send(props[:login]) == login\n encrypted(pass, encryption_type) == self.send(props[:crypted_password])\n end",
"def login?\n if session_code && session_token\n # check code exist at db.\n code_rec = get_code_rec(session_code, subject)\n return false unless code_rec\n # compare token\n code_rec.token == session_token\n else\n return false\n end\n end",
"def validate_login(user_name, password)\n if user_name.blank? || password.blank?\n flash_now(:error, get_blank_message(user_name, password))\n return false\n end\n\n # No validate file means only remote authentication is allowed\n return false unless Settings.validate_file\n\n ip = Settings.validate_ip ? request.remote_ip : nil\n authenticate_response = User.authenticate(user_name, password, ip: ip)\n custom_status = Settings.validate_custom_status_message[authenticate_response]\n\n if authenticate_response == User::AUTHENTICATE_BAD_PLATFORM\n flash_now(:error, I18n.t('main.external_authentication_not_supported'))\n elsif custom_status\n flash_now(:error, custom_status)\n elsif authenticate_response == User::AUTHENTICATE_SUCCESS\n return true\n else\n flash_now(:error, Settings.incorrect_login_message || I18n.t('main.login_failed'))\n end\n false\n end",
"def authenticate(server_user, server_pass, signed_text)\n begin\n token_array = decrypt(signed_text).split(':')\n\n s_user = token_array[0]\n expires = token_array[-1]\n\n return \"Server password missmatch\" if server_pass != password\n\n return \"User name missmatch\" if ( s_user != server_user ||\n s_user != @options[:srv_user] )\n\n return \"login token expired\" if Time.now.to_i >= expires.to_i\n\n return true\n rescue => e\n return e.message\n end\n end",
"def service_login?()\n return true if (@service == TAC_PLUS_AUTHEN_SVC_LOGIN)\n return false\n end",
"def check(creds)\n creds[1].crypt(password) == password\n end",
"def login_check(username, password)\n sql = <<~SQL\n SELECT * FROM users\n WHERE username = $1 AND password = $2;\n SQL\n check = query(sql, username, password)\n if check.ntuples == 1\n @user_id = check.tuple(0)[\"id\"]\n @username = check.tuple(0)[\"username\"]\n true\n else \n false \n end\n end",
"def check_login\n action = \"check_login\"\n url = build_url(action)\n\n # Create data for post\n data = post_data(action)\n\n # The raw should be the terminal number on success\n response = parse(ssl_post(url, data)) { |raw| { code: raw[0..6] } }\n success = response[:code] == @options[:terminal_no] ? true : false\n Response.new(\n success,\n \"\",\n response,\n authorization: \"\",\n test: test?\n )\n end",
"def authorize(login, password)\n return true unless CloudCrowd.config[:use_http_authentication]\n return CloudCrowd.config[:login] == login &&\n CloudCrowd.config[:password] == password\n end",
"def test05_ValidLogin_TC_24862\n\t\t$browser.cookies.clear\n\t\t$browser.goto($patch_login)\n\t\t$email.set(\"#{$user_master_email}\")\n\t\t$password.set(\"#{$user_master_password}\")\n\t\t$sign_in_button.click\n\t\tsleep 4\n\t\t\n\t\tbegin \n\t\t\tassert $logged_in_avatar.exists?\n\t\t\trescue => e\n\t\t\tputs e\n\t\t\tputs \"LS2T2: FAILED! User not logged in.\"\n\t\tend\t\n\tend",
"def validate_credentials(passwd)\n token = Security.logon_user(@username, nil, passwd,\n LOGON32_LOGON_NETWORK, LOGON32_PROVIDER_DEFAULT)\n true\n rescue Chef::Exceptions::Win32APIError => e\n Chef::Log.trace(e)\n # we're only interested in the incorrect password failures\n if /System Error Code: 1326/.match?(e.to_s)\n return false\n end\n\n # all other exceptions will assume we cannot logon for a different reason\n Chef::Log.trace(\"Unable to login with the specified credentials. Assuming the credentials are valid.\")\n true\n end",
"def check_credentials(username, password)\n return false\n end",
"def login\n response = get \"server\"\n response.code == 200\n end",
"def login\n response = get \"server\"\n response.code == 200\n end",
"def login_verify(partner)\n user_banner.visible?.eql?(partner.admin_info.email)\n if partner.base_plan.eql?(\"free\")\n free_user_verify(partner)\n else\n paid_user_verify(partner)\n end\n end",
"def valid?(login, password)\n login = login.to_s\n password = password.to_s\n if password == '' || password.include?(\"\\0\") || login.include?(\"\\0\")\n false\n elsif ldap_library == 'net/ldap'\n connection.authenticate(login_format % login, password)\n begin\n if connection.bind\n logger.info(\"Authenticated #{login} by #{server}\") if logger\n true\n else\n logger.info(\"Error attempting to authenticate #{login} by #{server}: #{connection.get_operation_result.code} #{connection.get_operation_result.message}\") if logger\n switch_server unless connection.get_operation_result.code == 49\n false\n end\n rescue Net::LDAP::Error, SocketError, SystemCallError => error\n logger.info(\"Error attempting to authenticate #{login} by #{server}: #{error.message}\") if logger\n switch_server\n false\n end\n else\n connection.unbind if connection.bound?\n begin\n connection.bind(login_format % login, password)\n connection.unbind\n logger.info(\"Authenticated #{login} by #{server}\") if logger\n true\n rescue LDAP::ResultError => error\n connection.unbind if connection.bound?\n logger.info(\"Error attempting to authenticate #{login} by #{server}: #{error.message}\") if logger\n switch_server unless error.message == 'Invalid credentials'\n false\n end\n end\n end",
"def valid_network_credentials?(password_plaintext)\n !! NETWORK_USER_CLASS.authenticate(network_user, password_plaintext)\n end",
"def test_wrong_user_login\n res = make_login Configuration.WRONG_USER, Configuration.PASSWORD\n puts \"\\nTester#test_wrong_user_login:\\n#{res}\" if Configuration.VERBOSE\n res == Constants.FAILURE_MESSAGE\n end",
"def validate_login(username, password)\n user = nil\n begin\n user = @users.find_one(_id: username)\n # you will need to retrieve right document from the users collection.\n p 'This space intentionally left blank.'\n rescue\n p 'Unable to query database for user'\n end\n\n if user.nil?\n p 'User not in database'\n return nil\n end\n\n salt = user['password'].split(',')[1]\n\n if user['password'] != make_pw_hash(password, salt)\n p 'user password is not a match'\n return nil\n end\n # Looks good\n user\n end",
"def authenticate(name, password)\n if name != \"Santa Claus\" || password != \"Ho Ho Ho!\"\n return false\n end\n true\nend",
"def require_login?\n !(Jiralicious.username.empty? && Jiralicious.password.empty?) && !alive?\n end",
"def login(try_password)\n pass = Password.new(self.password.encode('ascii-8bit'))\n return pass == try_password\n end",
"def verify_credentials\n\t\tif (has_valid_credentials) then\n\t\t logger.error(\"VC: Logged in\")\n\t\telse\n\t\t # username is nil, just clobber login_time\n\t\t logger.error(\"VC: Not logged in, redirecting to login\")\n\t\t clear_session()\n\t\t redirect_to :controller => 'application', :action => 'index'\n\t\tend\n\tend",
"def password_match?(login_password)\n\t\tencrypted_password == encrypt(login_password)\t\n\tend",
"def valid_signin?(credentials)\n account_data = File.readlines(ROOT + \"/users.txt\")\n\n valid_credentials = account_data.map do |account|\n name, password = account.strip.split(\": \")\n [name, password]\n end.to_h\n\n valid_credentials.each do |(name, password)|\n return true if credentials[:username] == name && BCrypt::Password.new(password) == credentials[:password]\n end\n\n false\nend",
"def login (db, username, password)\n results = db.get_first_value('SELECT Password FROM log_in WHERE TwitterUsername = ?', [username])\n if(results == password)\n return true\n else\n return false\n end\n \nend",
"def username_and_password_check\n company = get_company\n if company == nil\n exit\n end\n if get_username_and_test_validity(company)\n if get_password_and_test_validity(company)\n else\n puts \"That is not the correct password, please try again.\"\n entry_menu\n end\n else \n puts \"That is not the correct username, please try again.\"\n entry_menu\n end\n end",
"def login(username, access_key)\n return false unless username.present? && access_key.present?\n\n params = { :login => uri_encode(username), :code => access_key }\n response = self.class.get('/login.aspx', :query => params)\n xml = response.parsed_response\n\n if response.success? && !xml['bbapi'].has_key?('error')\n self.session_id = retrieve_cookie(response, SESSION_ID_COOKIE)\n self.auth_token = retrieve_cookie(response, AUTH_TOKEN_COOKIE)\n return true\n else\n return false\n end\n end",
"def verify_credentials!\n raise AuthenticationError.new(\"missing client code\") if Applitrack.client_code.nil? || Applitrack.client_code.empty?\n raise AuthenticationError.new(\"missing username\") if Applitrack.username.nil? || Applitrack.username.empty?\n raise AuthenticationError.new(\"missing password\") if Applitrack.password.nil? || Applitrack.password.empty?\n end",
"def login\n\t\tbegin\n\t\t\tr = execute(make_xml('LoginRequest', { 'sync-id' => 0, 'password' => @password, 'user-id' => @username }))\n\t\trescue APIError\n\t\t\traise AuthenticationFailed.new(r)\n\t\tend\n\t\tif(r.success)\n\t\t\t@session_id = r.sid\n\t\t\treturn true\n\t\tend\n\tend",
"def cookie_login\n @current_user = User.find(cookies[:user_id])\n return @current_user.password_hash == cookies[:password_hash] \n rescue \n return false\n end",
"def auth_ok?(username, password)\n username_present?(username) && password_ok?(username, password)\n end",
"def login\n self.session = GData::Client::Calendar.new if session.nil?\n @token = session.clientlogin(username,password)\n true\n end",
"def login\n username = find_username\n password = find_password\n\n Log.info(\"Logging in as #{username}... \", newline: false)\n\n conn = Faraday.new(url: \"https://redacted.ch/\")\n response = conn.post do |request|\n request.url \"login.php\"\n request.headers[\"User-Agent\"] = RedactedBetter.user_agent\n request.body = { username: username, password: password }\n end\n\n handle_login_response(response)\n rescue Faraday::TimeoutError\n Log.error(\"Logging in timed out. Perhaps Redacted is down?\")\n false\n end",
"def login_correctly\r\n\t\tuserid = \"correct_user@email.com\"\r\n\t\tpassword = \"correct_password\"\r\n\tend",
"def connect_login(global = true)\n ftpsock = connect(global)\n\n\n if !(user and pass)\n print_status(\"No username and password were supplied, unable to login\")\n return false\n end\n\n print_status(\"Authenticating as #{user} with password #{pass}...\")\n res = raw_send_recv(\"a001 LOGIN #{user} #{pass}\\r\\n\")\n\n if (res !~ /^a001 OK/)\n print_status(\"Authentication failed\")\n return false\n end\n\n return true\n end",
"def test_function_login_successfully\n username = \"ladywind\"\n password = \"1234567890\"\n result = V1::User.login(username,password)\n actual = result[:meta][:code]\n expected = 200\n puts this_method_name + \" - \" +assert_equal(expected, actual).to_s\n end",
"def handleLoginData(loginJSON, client)\n\tuserID = loginJSON[\"userID\"]\n\tpasswordHash = loginJSON[\"passwordHash\"]\n\n\tusers = @loginDatabase.query(\"SELECT * FROM users\")\n\n\tsleep(1) # wait a second so the client has time to wait for the response\n\n\t# Check the incoming data against all users, and if we have a match, send back an 'okay'\n\tusers.each do |user|\n\t\tif(userID == user[\"username\"] && passwordHash == user[\"password\"])\n\t\t\tputs \"okay\"\n\t\t\tclient.write(\"okay\")\n\n\t\t\t#get the existingJSON once from the database\n\t\t\t@existingJSON = user['data']\n\n\t\t\treturn\n\t\tend\n\tend\n\n\t# If we haven't given a response by now, send back a \"bad\" response\n\tclient.write(\"bad\")\nend",
"def checkLogin (loginName , loginPwd)\n\tconn = PGconn.open(:dbname => 'netflix')\n\tresult = conn.exec(\"SELECT * FROM users WHERE name=#{loginName};\")\n\treturn checkValidLogin(result, loginPwd)\t\nend",
"def online? login\n return false if !@usrData\n return true if @usrData[login]\n false\n end",
"def test_wrong_password_login\n res = make_login Configuration.USER, Configuration.WRONG_PASSWORD\n puts \"\\nTester#test_wrong_password_login:\\n#{res}\" if Configuration.VERBOSE\n res == Constants.FAILURE_MESSAGE\n end",
"def modelo_login_cliente(name,password)\n CSV.foreach($clientes,headers:true) do |row|\n if row['name'] == name && row['pass'] == password then return true end\n end\n false\nend",
"def test_verifier\n @client = SRP::Client.new @login,\n :password => @password,\n :salt => '4c78c3f8'.hex\n v = '474c26aa42d11f20544a00f7bf9711c4b5cf7aab95ed448df82b95521b96668e7480b16efce81c861870302560ddf6604c67df54f1d04b99d5bb9d0f02c6051ada5dc9d594f0d4314e12f876cfca3dcd99fc9c98c2e6a5e04298b11061fb8549a22cde0564e91514080df79bca1c38c682214d65d590f66b3719f954b078b83c'\n assert_equal v, \"%x\" % @client.verifier\n end",
"def auth_works?(server)\n return false unless (server && server.token && server.token.access_token)\n vspinner \"Testing if authentication works using current access token\" do\n Kontena::Client.new(server.url, server.token).authentication_ok?(master_account.userinfo_endpoint)\n end\n end",
"def checklogin\n\t\t@login = User.where(\"username = '#{params[:username]}'\").first\n\n\t\tp params, @login\n\n\t\tif @login && @login.password == params[:password]\n\t\t\tsession[:user_id] = @login.id\n\t\telse\n\t\t\tflash[:notice] = \"YOU ARE AN IMPOSTER\"\n\t\t\t@login = nil\n\t\t\tredirect_to '/'\n\t\tend\n\tend",
"def checkUserLogin(userName, userPass)\n userPass = userPass.encrypt\n user = loadUser(userName)\n if(user == nil)\n return false\n end\n userPassLoaded = user[\"password\"]\n #puts \"----------------------\\nPass1: [#{userPass}]\\nPass2: [#{userPassLoaded}]\\n-----------------------------------\"\n\n blocked = user[\"BLOCKED\"]\n if userPassLoaded == userPass && (blocked == nil || !blocked)\n return true\n else\n return false\n end\n end",
"def redirected_to_login?\n @agent.page.body.match \"#{@piggybank.url_base}/cas/login.php\"\n end",
"def check_login(login_text)\n begin\n @agent.redirect_ok = false\n resp = nil\n begin\n resp = @agent.get(addr(\"/feed\"),[],nil,{'cookie' => @cookie_login})\n @agent.redirect_ok = true\n rescue\n @agent.redirect_ok = true\n return nil\n end\n location = resp.response['location']\n #Phone needed\n if(location.to_s.index('security_check'))\n last_numbers = login_text[/\\d\\d\\d\\d$/]\n return nil unless last_numbers\n to = location.scan(/to\\=([^\\&]+)/)[0][0]\n #Follow redirect\n begin\n resp = @agent.get(addr(location),[],nil,{'cookie' => @cookie_login})\n rescue\n return nil\n end\n res = resp.body\n #Get hash\n hash = res.scan(/hash\\s*\\:\\s*\\'?\\\"?([^\\\"\\']+)\\\"?\\'?/)[0][0]\n post(\"/login.php\",{\"act\"=>\"security_check\",\"al\"=>\"1\",\"al_page\"=>\"3\",\"code\"=>last_numbers,\"hash\"=>hash,\"to\"=>to})\n begin\n resp = @agent.get(addr(\"/feed\"),[],nil,{'cookie' => @cookie_login})\n rescue\n return nil\n end\n end\n res = resp.body\n res.force_encoding(\"cp1251\")\n res = res.encode(\"utf-8\")\n\n id = User.get_id_by_feed(res)\n @uid = id\n return id\n rescue\n return false\n end\n end",
"def valid_credentials?(session)\n if session.login[:Credentials][:Username].length >= 1 && RuneRb::GLOBAL[:GAME_BANNED_NAMES].none? { |row| row[:name].include?(session.login[:Credentials][:Username]) }\n true\n else\n @responses[session].write(RuneRb::Network::LOGIN_RESPONSES[:BAD_CREDENTIALS], type: :byte, signed: false)\n raise RuneRb::System::Errors::SessionReceptionError.new(:username, nil, session.login[:Credentials][:Username])\n end\n\n if RuneRb::Database::PlayerProfile.fetch_profile(session.login[:Credentials])[:password] == session.login[:Credentials][:Password]\n true\n else\n @responses[session].write(RuneRb::Network::LOGIN_RESPONSES[:BAD_CREDENTIALS], type: :byte, signed: false)\n raise RuneRb::System::Errors::SessionReceptionError.new(:password, nil, nil)\n end\n true\n end",
"def verify_session proof, client_M\n @A = proof[:A]\n @B = proof[:B]\n @b = proof[:b].to_i(16)\n username = proof[:I]\n xsalt = proof[:s]\n v = proof[:v].to_i(16)\n\n u = SRP.calc_u(@A, @B, @N)\n # SRP-6a safety check\n return false if u == 0\n\n # calculate session key\n @S = \"%x\" % SRP.calc_server_S(@A.to_i(16), @b, v, u, @N)\n @K = SRP.sha1_hex(@S)\n\n # calculate match\n @M = \"%x\" % SRP.calc_M(username, xsalt, @A, @B, @K, @N, @g)\n\n if @M == client_M\n # authentication succeeded\n @H_AMK = \"%x\" % SRP.calc_H_AMK(@A, @M, @K, @N, @g)\n return @H_AMK\n end\n return false\n end",
"def check_login(username, password)\n\n results = $db.execute('SELECT * FROM users WHERE username = ?', username)\n p results\n if results == [] \n return -1\n end\n\n password_digest = results[0]['password_digest']\n userid = results[0]['userid']\n\n if BCrypt::Password.new(password_digest) == password \n return userid\n else\n return 0\n end\n end",
"def checkForUser(loginUser)\n @users = User.all\n\n @users.each do |user|\n if user[\"userName\"] == loginUser[\"userName\"] && user[\"password\"] == loginUser[\"password\"]\n return user\n end\n end\n return false\n\n end",
"def modelo_login_vendedor(name,password)\n CSV.foreach($vendedores,headers:true) do |row|\n if row['name'] == name && row['pass'] == password then return true end\n end\n false\nend",
"def cookie_login\n @current_player = Player.find(cookies[:player_id])\n @current_player.password_hash == cookies[:password_hash] \n rescue \n false\n end",
"def check_authentication( req )\n\t\tusername = req.params[:username]\n\t\tpassword = req.params[:password]\n\n\t\tunless hmac = self.class.users[ username ]\n\t\t\tself.log.error \"Auth failure: no such user %p\" % [ username ]\n\t\t\tfinish_with( HTTP::AUTH_REQUIRED, \"authentication failure\" )\n\t\tend\n\n\t\tpw_hmac = OpenSSL::HMAC.hexdigest( 'sha1', self.class.key, password )\n\t\tself.log.debug \" hash of 'demo' is: %p\" % [ OpenSSL::HMAC.hexdigest('sha1', self.class.key, 'demo') ]\n\n\t\tunless hmac == pw_hmac\n\t\t\tself.log.error \"Auth failure: password digests don't match: expected %p, got %p\" %\n\t\t\t\t[ hmac, pw_hmac ]\n\t\t\tfinish_with( HTTP::AUTH_REQUIRED, \"authentication failure\" )\n\t\tend\n\n\t\t# Tell the auth provider that the user provided valid credentials\n\t\tself.auth_provider.auth_succeeded( req, username )\n\n\t\treturn username\n\tend",
"def login\n capture_session(self.class.post( \"/api/login\", {:body => {:user => @user, :passwd => @password, :api_type => 'json'}, :debug_output => @debug} ) )\n logged_in?\n end",
"def authentication_ok?(token_verify_path)\n return false unless token\n return false unless token['access_token']\n return false unless token_verify_path\n\n final_path = token_verify_path.gsub(/\\:access\\_token/, token['access_token'])\n debug { \"Requesting user info from #{final_path}\" }\n request(path: final_path)\n true\n rescue => ex\n error { \"Authentication verification exception\" }\n error { ex }\n false\n end",
"def authenticate(login, password)\n u = find_by_login(login) # need to get the salt\n u && u.authenticated?(password) ? u : nil\n end",
"def authenticate(username,password)\n user = @@users_module.lookup_user(username)\n return false if not user\n salt=user['salt']\n hashpass=Digest::SHA256.hexdigest(salt + password)\n if(hashpass == user['password'])\n return true\n end\n return false\n end",
"def checkCredentials(username, passcode, domaincode = '127000000001')\r\n\r\n _dprint(\"checkCredentials(#{username}, #{passcode}, #{domaincode}) called ...\")\r\n\r\n validCredentials = false\r\n offline_challenge = ''\r\n offline_response = ''\r\n chap_password = ''\r\n chap_challenge = ''\r\n valid_tag = \"VERIFY:VALID\"\r\n\r\n _dprint(\"Checking Credentials...\")\r\n\r\n mesg = \"VERIFY:\" + username + \"\\t\" + passcode + \"\\t\" + domaincode\r\n mesg = <<XML\r\n <transaction>\r\n <type format=\"base\">2</type>\r\n <data>\r\n <user-id>#{username}</user-id>\r\n <passcode>#{passcode}</passcode>\r\n <domaincode>#{domaincode}</domaincode>\r\n <offline-challenge encoding=\"none\">#{offline_challenge}</offline-challenge>\r\n <offline-response encoding=\"none\">#{offline_response}</offline-response>\r\n <chap-password encoding=\"none\">#{chap_password}</chap-password>\r\n <chap-challenge encoding=\"none\">#{chap_challenge}</chap-challenge>\r\n <result>null</result>\r\n </data>\r\n </transaction>\r\nXML\r\n\r\n reconnect {\r\n\r\n xml = _request(mesg)\r\n response = XPath.first(xml, '//data/result')\r\n\r\n if response =~ /VALID/\r\n validCredentials = true\r\n else\r\n validCredentials = false\r\n end\r\n _dprint(\"Read response: verdict = \" + validCredentials.to_s)\r\n }\r\n\r\n _dprint(\"Returning Results...\")\r\n return validCredentials\r\n end",
"def verify_credentials\n if auth_supplied?\n response = get \"account/verify_credentials\"\n response.ok? ? response : false\n else\n false\n end\n end",
"def authenticate(username, password)\n true\n end",
"def known?(login = @locals[:guser])\n return false unless login\n return true if ENV['RACK_ENV'] == 'test'\n return true if login == settings.config['rewards']['login']\n return true if login == settings.config['exchange']['login']\n Zold::Http.new(uri: 'https://www.0crat.com/known/' + login).get.code == 200\nend",
"def able_to_login?(args = {})\n begin\n telnet = Net::Telnet::new(\"Host\" => args[:ip], \"Port\" => args[:port])\n telnet.login(args[:user], args[:password])\n return true\n rescue => e\n return false\n end\n end",
"def validate(x_login, x_api_key)\n response = get('/validate', :query => { :x_login => x_login, :x_apiKey => x_api_key })\n return response['data']['valid'] == 1\n end",
"def login(username=nil, password=nil)\n creds = {\n :userId => username || @username,\n :password => password || @password,\n }\n begin\n response = @client.call(:login, :message => creds)\n rescue => e\n # This can happen if a bad URL is given\n raise Cherby::LoginFailed, e.message\n end\n\n # This can happen if invalid credentials are given\n if response.body[:login_response][:login_result] == false\n raise Cherby::LoginFailed, \"Cherwell returned false status\"\n end\n\n # Store cookies so subsequent requests will be authorized\n @client.cookies = response.http.cookies\n\n # Double-check that login worked\n if self.last_error\n raise Cherby::LoginFailed, \"Cherwell returned error: '#{self.last_error}'\"\n end\n\n return true\n end",
"def login\n @login ||= begin\n client.user.login\n rescue Octokit::Unauthorized, Faraday::ConnectionFailed\n nil\n end\n end",
"def login (username=\"alien\", password=\"password\")\n\t\tif @connected\n\t\t\tbegin\n\t\t\t\t@sock.write \"#{username}\\r\\n\"\n\t\t\t\treceive()\n\t\t\t\t@sock.write \"#{password}\\r\\n\"\n\t\t\t\ts = receive()\n\t\t\t\t\n\t\t\t\tif s.include? \"Error:\"\n\t\t\t\t\terr_msg = s.scan(/(Error: )(.*)/).flatten[1]\n\t\t\t\t\tclose()\n\t\t\t\t\traise \"Trouble logging in. \" << err_msg\n\t\t\t\t\t@connected = false\n\t\t\t\tend\n\t\t\trescue\n\t\t\t\traise\n\t\t\tend\n\t\tend\n\n\t\treturn @connected\n\tend",
"def authenticate_user?\n authenticate_customer == \"200 OK\"\n end",
"def verify_session proof, client_M\n @A = proof[:A]\n @B = proof[:B]\n @b = proof[:b].to_i(16)\n username = proof[:I]\n xsalt = proof[:s]\n v = proof[:v].to_i(16)\n\n @u = SRP.calc_u(@A, @B, @N)\n # SRP-6a safety check\n return false if @u == 0\n\n # calculate session key\n @S = SRP.calc_server_S(@A.to_i(16), @b, v, @u, @N).to_hex_string\n @K = SRP.sha512_hex(@S)\n\n # calculate match\n @M = SRP.calc_M(username, xsalt, @A, @B, @K, @N, @g).to_hex_string\n\n if @M == client_M\n # authentication succeeded\n @H_AMK = SRP.calc_H_AMK(@A, @M, @K, @N, @g).to_hex_string\n return @H_AMK\n end\n return false\n end",
"def valid_crowd_authentication?(password)\n Devise::Crowd::Adapter.valid_credentials?(login_with, password)\n end",
"def authenticate(user, pass)\n self.logger.debug \"#{user} : #{pass}\"\n Digest::MD5.hexdigest(pass) == CREDENTIALS[user]\n end",
"def login(mail, passwd)\n request_body = '&Email='+CGI.escape(mail)+'&Passwd='+CGI.escape(passwd)+'&accountType=HOSTED&service=apps'\n res = request({:method => 'POST', :path => '/accounts/ClientLogin' }, nil, request_body, {'Content-Type'=>'application/x-www-form-urlencoded'})\n return /^Auth=(.+)$/.match(res.to_s)[1]\n # res.to_s needed, because res.class is REXML::Document\n end",
"def login(username, password)\n if password == 'test'\n print username, 'ALLOWED'\n else\n print username, 'DENIED'\n end\nend",
"def verify_user(username, password, users)\n un = get_user(username, users)\n if(un.nil?)\n return false\n else\n return BCrypt::Password.new(un[:password]) == password\n end\n end",
"def do_login(user, pass)\n vprint_status(\"#{rhost}:#{rport} - Trying username:'#{user.inspect}' with password:'#{pass.inspect}'\")\n begin\n res = send_request_cgi(\n {\n 'uri' => \"/doms/login/processLogin.php\",\n 'method' => 'GET',\n 'vars_get' =>\n {\n 'login' => user,\n 'passwd' => pass,\n 'tzOffset' => '-25200',\n 'tzString' => 'Thur+May+05+1983+05:05:00+GMT+0700+'\n }\n })\n\n if res.nil?\n print_error(\"#{rhost}:#{rport} - Connection timed out\")\n return :abort\n end\n\n check_key = \"The user has logged in successfully.\"\n\n key = JSON.parse(res.body)[\"statusString\"]\n\n if (not res or key != \"#{check_key}\")\n vprint_error(\"#{rhost}:#{rport} - FAILED LOGIN. '#{user.inspect}' : '#{pass.inspect}' with code #{res.code}\")\n return :skip_pass\n else\n print_good(\"#{rhost}:#{rport} - SUCCESSFUL LOGIN. '#{user.inspect}' : '#{pass.inspect}'\")\n report_cred(\n ip: rhost,\n port: rport,\n service_name: 'SevOne Network Performance Management System Application',\n user: user,\n password: pass,\n proof: key\n )\n return :next_user\n end\n\n rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout, ::Rex::ConnectionError, ::Errno::EPIPE\n print_error(\"#{rhost}:#{rport} - HTTP Connection Failed, Aborting\")\n return :abort\n end\n end",
"def needs_login?() false end",
"def chapVerify(username, domaincode, wikidChallenge = '', chapPassword = '', chapChallenge = '')\r\n\r\n _dprint(\"chapVerify() called ...\")\r\n reconnect()\r\n validCredentials = false\r\n valid_tag = \"VERIFY:VALID\"\r\n _dprint(\"Checking Chap Credentials\")\r\n\r\n mesg = \"CHAPOFFVERIFY:\" + username + \"\\t\" + \"nil\" + \"\\t\" + domaincode + \"\\t\" + wikidChallenge\r\n\r\n reconnect {\r\n\r\n $sslsocket.puts(chapPassword.length)\r\n $sslsocket.puts(chapPassword)\r\n $sslsocket.puts(chapChallenge.length)\r\n $sslsocket.puts(chapChallenge.length)\r\n $sslsocket.flush\r\n\r\n _dprint(\"Reading in...\")\r\n\r\n inputLine = $sslsocket.gets.chomp!\r\n if (inputLine[0, valid_tag.length] == valid_tag)\r\n validCredentials = true\r\n end\r\n }\r\n\r\n return validCredentials\r\n end",
"def authenticate?( user, salt, auth )\n return auth == calculate_auth_string( salt, user )\n end"
] | [
"0.7224755",
"0.7023317",
"0.7023317",
"0.7023317",
"0.6988713",
"0.6818284",
"0.6800512",
"0.67621803",
"0.6728221",
"0.67221004",
"0.6692971",
"0.6649924",
"0.664224",
"0.65973926",
"0.65734494",
"0.65288544",
"0.6464526",
"0.64614815",
"0.6452756",
"0.6435171",
"0.63927394",
"0.6376569",
"0.63693553",
"0.63647586",
"0.6363026",
"0.6349275",
"0.63182753",
"0.6314717",
"0.6312653",
"0.6312057",
"0.6291703",
"0.62873757",
"0.6279915",
"0.62735456",
"0.62735456",
"0.62570566",
"0.6246102",
"0.6234123",
"0.6210213",
"0.61846316",
"0.61815333",
"0.6175702",
"0.617502",
"0.6173914",
"0.6147167",
"0.6146753",
"0.6127434",
"0.61174375",
"0.6105455",
"0.6103037",
"0.60934323",
"0.6089108",
"0.6079116",
"0.60763043",
"0.6071622",
"0.6062598",
"0.60415787",
"0.6027428",
"0.60187835",
"0.60179734",
"0.6004696",
"0.5990323",
"0.59889966",
"0.5978848",
"0.59782714",
"0.59716153",
"0.59634674",
"0.5960491",
"0.5958498",
"0.5936753",
"0.59348077",
"0.59315705",
"0.5928536",
"0.59255475",
"0.5920609",
"0.5920418",
"0.5919899",
"0.59187824",
"0.59081405",
"0.5902511",
"0.5902395",
"0.58976585",
"0.5895349",
"0.58886236",
"0.58866346",
"0.58817655",
"0.58788276",
"0.58746773",
"0.5873287",
"0.58732617",
"0.5869943",
"0.5863675",
"0.58601606",
"0.5854528",
"0.585046",
"0.584853",
"0.5839863",
"0.5838833",
"0.58214504",
"0.58137953"
] | 0.6087821 | 52 |
Generate the JWT required for the initial GitHub Integrations API handshake. | def new_jwt_token
payload = {
iat: Time.now.to_i, # Issued at time.
exp: (10 * 60) + Time.now.to_i, # JWT expiration time.
iss: APP_ID # Integration's GitHub identifier.
}
JWT.encode(payload, PRIVATE_KEY, 'RS256')
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_jwt\n JWT.encode({\n id: id, \n exp: 60.days.from_now.to_i\n }, \n Rails.application.secrets.secret_key_base\n )\n end",
"def generate_jwt\n JWT.encode({ id: id,\n exp: 60.days.from_now.to_i },\n Rails.application.secrets.secret_key_base)\n end",
"def get_jwt_token\n private_key = OpenSSL::PKey::RSA.new(GITHUB_PRIVATE_KEY)\n\n payload = {\n # issued at time\n iat: Time.now.to_i,\n # JWT expiration time (10 minute maximum)\n exp: 5.minutes.from_now.to_i,\n # GitHub App's identifier\n iss: GITHUB_APP_ID\n }\n\n JWT.encode(payload, private_key, \"RS256\")\nend",
"def generate_jwt(expires_in: nil)\n SolidusJwt.encode(payload: as_jwt_payload, expires_in: expires_in)\n end",
"def signed_token(**payload)\n payload = {\n iss: 'The Identity Provider',\n iat: Time.now.to_i,\n exp: 1.hour.from_now.to_i,\n aud: ['example-api', 'user-api', 'calendar-api']\n }.merge(payload)\n private_key = OpenSSL::PKey::RSA.new(file_fixture('rsa1').read)\n JWT.encode(payload.to_h, private_key, 'RS256')\nend",
"def get_jwt_token\n payload = { data: {user: {id: self.id, email: self.email}} }\n payload[:exp] = (Time.now + Settings.jwt_token_expiry.days).to_i\n\n JWT.encode payload, ENV[\"HMAC_SECRET\"], 'HS256'\n end",
"def make_jwt\n token_payload = {\n 'tmcusr' => @username,\n 'tmctok' => @tmc_token,\n 'tmcuid' => @tmc_user_id,\n 'tmcadm' => @is_tmc_admin,\n 'exp' => @expires.to_i\n }\n jwt_string = JWT.encode(token_payload, @@jwt_secret, JWT_HASH_ALGO)\n jwt_string\n end",
"def generate_token\n begin\n @header = JSON.parse(@header) unless @header.is_a?(Hash)\n @payload = JSON.parse(@payload) unless @payload.is_a?(Hash)\n @alg = @header['alg'] if @alg.nil? # if algorithm not forced, take if from the header\n\n header_encoded = encode(@header.to_json)\n payload_encoded = encode(@payload.to_json)\n data = \"#{header_encoded}.#{payload_encoded}\"\n signature_encoded = encode(generate_sig(data, @alg, @key).signature)\n token = [header_encoded, payload_encoded, signature_encoded].join('.')\n\n set_hash_and_json\n token\n rescue JSON::ParserError => e\n puts '[x] '.red + \"Invalid JSON: #{e.message}\"\n puts \"[!] \".yellow + \"Make sure you've single quoted your input: eg. --header #{\"'\".bold}{\\\"type\\\":\\\"JWT\\\",\\\"alg\\\":\\\"HS256\\\"}#{\"'\".bold}\"\n exit!\n rescue Exception => e\n puts \"[x] \".red + \"Unknown Exception: generate_sig\"\n puts '[!] '.yellow + 'Please report the issue at: https://github.com/KINGSABRI/jwtear/issues'.underline\n puts e\n puts e.backtrace\n end\n end",
"def jwt_encode(payload, headers = {})\n # Add security claims to payload\n payload.reverse_merge!(\n # Time at which the Issuer generated the JWT (epoch).\n iat: Time.now.to_i,\n\n # Expiration time on or after which the tool MUST NOT accept the ID Token for\n # processing (epoch). This is mostly used to allow some clock skew.\n exp: Time.now.to_i + 5.minutes.to_i,\n\n # String value used to associate a tool session with an ID Token, and to mitigate replay\n # attacks. The nonce value is a case-sensitive string.\n nonce: SecureRandom.uuid\n )\n\n # Add additional info into the headers\n headers.reverse_merge!(\n # Set the id of they key\n kid: jwk_kid\n )\n\n JWT.encode(payload, private_key, ALGORITHM, headers)\n end",
"def generate_token\n jwt_secret = CLIENT_SECRET\n header = {\n typ: \"JWT\",\n alg: TOKEN_ALG\n }\n current_timestamp = DateTime.now.strftime(\"%Q\").to_i / 1000.floor\n data = {\n iss: SERVICE_ID,\n iat: current_timestamp\n }\n stringified_header = header.to_json.encode(\"UTF-8\")\n encoded_header = base64url(stringified_header)\n stringified_data = data.to_json.encode(\"UTF-8\")\n encoded_data = base64url(stringified_data)\n token = \"#{encoded_header}.#{encoded_data}\"\n signature = OpenSSL::HMAC.digest(\"SHA256\", jwt_secret, token)\n signature = base64url(signature)\n signed_token = \"#{token}.#{signature}\"\n signed_token\n end",
"def jwt\n return nil if params.format_version.zero?\n SimpleJWT.new(jwt_data, :mac => mac, :key => key, :key_id => key_id)\n end",
"def generate_jwt(pem_key, expiration = nil)\n payload = {\n 'sub': 123,\n 'username': 'mail@example.com',\n 'exp': expiration || Time.now.to_i + 4 * 3600\n }\n JWT.encode(payload, pem_key, 'RS256')\nend",
"def generate_json_web_token(time)\n JWT.encode(\n {\n sub: key_info['oxtrust_client_id'],\n iss: key_info['oxtrust_client_id'],\n exp: time.to_i + 86_400,\n iat: time.to_i,\n jti: SecureRandom.hex(10),\n aud: 'https://localhost/oxauth/restv1/token'\n },\n load_auth_key,\n 'RS256',\n jwt_headers,\n )\n end",
"def new_jwt\n Knock::AuthToken.new(payload: { sub: current_user.id }).token\n end",
"def generateJWT( email )\n return ({:token => (email + @@secret)}.to_json)\n end",
"def dev_jwt_token\n JWT.encode(\n {user_id: id},\n Rails.application.secrets.json_web_token_key,\n 'HS256'\n )\n end",
"def jwt_subject\n auth0_id_string\n end",
"def issue_token(payload)\n JWT.encode(payload, Rails.application.credentials.secret_key_base)\n # JWT.encode(payload, ENV[\"SOME_SECRET\"], ENV[\"SOME_SUPER_SECRET\"])\nend",
"def signed_jwt\n # We're using the Ruby JWT library.\n # There are very well supported libraries in nearly every language here: https://jwt.io/\n JWT.encode(jwt_payload, median_api_key)\n end",
"def jwt_key\n 'ec6a8b69ae22049f900af9bd9f14ffb4dc6937f69575ab49b4df2d28364055b8'\n end",
"def encode_token(payload)\n JWT.encode(payload, Rails.application.secrets.secret_key_base, 'HS256')\n end",
"def encodeJWT(creator, exp=2.hours.from_now)\n # add the expire to the payload, as an integer\n payload = { creator_id: creator.id }\n payload[:exp] = exp.to_i\n\n # Encode the payload whit the application secret, and a more advanced hash method (creates header with JWT gem)\n JWT.encode( payload, Rails.application.secrets.secret_key_base, \"HS512\")\n\n end",
"def generate_token (id, number_of_days)\n JWT.encode({ :user_id => id, :expires => expires_in(number_of_days) }, 'somesecrethere')\n end",
"def encode_token(payload) #encodes your username\n JWT.encode(payload, ENV['SECRET'])\n end",
"def get_jwt_token(data)\n payload = {data: data}\n secret_key = GlobalConstant::ExplorerApi.secret_key\n\n JWT.encode(payload, secret_key, 'HS256')\n end",
"def create_one_time_token\n @one_time_token =\n JSON::JWT.new(iss: 'try-it-juku-test',\n exp: 5.minutes.since,\n nbf: Time.current,\n sub: { shin_cd: @current_chief.shin_cd })\n .sign(ACCESS_TOKEN_SIGNATURE, :HS256).to_s\n end",
"def jwt_auth_token( secret )\n\n # expire in 5 minutes\n exp = Time.now.to_i + 5 * 60\n\n # just a standard claim\n exp_payload = { exp: exp }\n\n return JWT.encode exp_payload, secret, 'HS256'\n\n end",
"def encode(payload)\n #return token\n JWT.encode(payload, secret_key, 'HS256')\n end",
"def generate_token(_user)\n JsonWebToken.encode(login_payload, 3.days.from_now)\n end",
"def issue_token payload\n JWT.encode(payload, secret, algorithm)\n end",
"def jwt_encode(payload)\n JWT.encode(payload, private_key, ALGORITHM, kid: jwk_kid)\n end",
"def encode(payload)\n token = JWT.encode(payload, secret_key, 'HS256')\n end",
"def token_generator(params)\n Auth::JsonWebToken.encode(params)\n end",
"def create_token(id, username)\n JWT.encode(payload(id, username), ENV['JWT_SECRET'], 'HS256')\n end",
"def create_token(id, username)\n JWT.encode(payload(id, username), ENV['JWT_SECRET'], 'HS256')\n end",
"def encode_token(payload)\n #! Environment variables will need to be set!\n JWT.encode(payload, ENV['SECRET_KEY_BASE'])\n end",
"def token\n JWT.encode(claims, rsa_key, 'RS512')\n end",
"def generate_token(user)\n payload = {user_id: user.id}\n JWT.encode(payload, ENV[\"SEC_KEY\"] , 'HS256')\n end",
"def auth_token \n JWT.encode({id: self.id}, \"9885ea7895518eaf88c4a8a2e8f62c82\")\n end",
"def encode_token(payload)\n # Secret environment variable is found in config/application.yml\n JWT.encode(payload, ENV[\"SECRET\"])\n end",
"def to_token_payload\n payload = {}\n # std jwt claims\n payload['sub'] = id.to_s\n payload['iat'] = Time.now.utc.to_i\n payload['iss'] = Rails.application.secrets.jwt_issuer\n # sombra claims\n payload['role'] = role\n payload['name'] = name\n payload\n end",
"def generate_jwt(user)\n now = Time.current.to_i\n\n payload = { sub: user.id,\n iat: now,\n exp: now + Config.jwt_lifetime }\n\n payload.merge!(user.public_send(Config.jwt_additional_user_payload_action)) if Config.jwt_additional_user_payload_action\n\n { token: jwt_encode(payload),\n payload: payload }\n end",
"def jwt_headers\n {\n typ: 'JWT',\n alg: key_info['alg'],\n kid: key_info['kid']\n }\n end",
"def encode(creator)\n #data to be saved in JSON\n payload = { id: creator.id, userName: creator.userName, expires: 2.hours.from_now.to_i}\n \n #very much safe\n JWT.encode( payload, Rails.application.secrets.secret_key_base, \"HS512\")\n end",
"def authentication_token\n @authentication_token ||= JWT.encode(payload, secret, algorithm)\n end",
"def encode_token(payload)\n JWT.encode(payload, 'SECRET')\n end",
"def generate_unique_secure_token\n return unless user_id\n\n unless ENV['JWT_HMAC_SECRET'].present?\n raise 'JWT_HMAC_SECRET is missing in env' unless Rails.env.test?\n end\n\n self.token ||= ::JWT.encode(\n {\n user: {\n id: user_id,\n created_at: Time.now.utc.iso8601(3)\n }\n },\n ENV['JWT_HMAC_SECRET'],\n 'HS256'\n )\n end",
"def call(payload)\n payload_to_encode = merge_with_default_claims(payload)\n JWT.encode(payload_to_encode, secret, algorithm)\n end",
"def make_token(user_id)\n payload = { user_id: user_id }\n JWT.encode(payload, hmac_secret, 'HS256')\n end",
"def generate_jws(header:, payload:, key:)\n jwt = JSON::JWT.new(JSON.parse(payload, symbolize_names: true))\n jwt.header = JSON.parse(header, symbolize_names: true)\n handle_signing(jwt, key)\n rescue JSON::JWS::UnexpectedAlgorithm => e\n puts \"Unexpected algorithm '#{jwt.header[:alg]}'.\"\n puts e.message\n exit!\n rescue Exception => e\n print_error e.message\n end",
"def auth_token\n JWT.encode({ id: self.id }, '65bc368fbc69306')\n end",
"def create\n payload = {\n user: {\n name: Faker::Name.name,\n email: Faker::Internet.email\n }\n }\n\n jwt = Auth.issue(payload)\n\n render json: { jwt: jwt }\n end",
"def encode_token(payload)\n JWT.encode(payload, 'yourSecret')\n end",
"def verification_token\n JWT.encode({ iat: Time.now.to_i }, config.secret, JWT_ALG)\n end",
"def encode_token(payload) \n # this method takes in a payload (a hash of key/values you want to save in the token) and signs a token using a secret key. (in production this should an ENV variable.)\n JWT.encode(payload, 'yourSecret') \n end",
"def token(secret, claims={})\n payload = {}\n payload.merge!(claims)\n puts secret\n JWT.encode payload, [secret].pack('H*').bytes.to_a.pack('c*'), 'HS256'\nend",
"def encode(payload)\n JWT.encode(payload, secret_key, 'HS256')\n end",
"def encode(payload)\n JWT.encode(payload, secret_key, 'HS256')\n end",
"def encode_token(payload) # arg is data identifying user — e.g., DB id and username\n \n JWT.encode(payload, secret,algorithm)\n end",
"def to_jwt(person=nil)\n now = Time.new \n assertion = {\n \"iss\" => @issuer,\n \"scope\" => self.scope,\n \"aud\" => \"https://accounts.google.com/o/oauth2/token\",\n \"exp\" => (now + expiry).to_i,\n \"iat\" => now.to_i\n }\n assertion['prn'] = person unless person.nil?\n return JWT.encode(assertion, @key, \"RS256\")\n end",
"def auth_bypass_token\n JWT.encode(\n {\n \"sub\" => auth_bypass_id,\n \"content_id\" => content_id,\n \"iat\" => Time.zone.now.to_i,\n \"exp\" => 1.month.from_now.to_i,\n },\n Rails.application.secrets.jwt_auth_secret,\n \"HS256\",\n )\n end",
"def auth_bypass_token\n JWT.encode(\n {\n \"sub\" => auth_bypass_id,\n \"content_id\" => content_id,\n \"iat\" => Time.zone.now.to_i,\n \"exp\" => 1.month.from_now.to_i,\n },\n Rails.application.secrets.jwt_auth_secret,\n \"HS256\",\n )\n end",
"def to_s\n JWT.encode(to_h, ENV['JWT_KEY'])\n end",
"def sign()\n exp_payload = { :data => to_json, :exp => validto.to_s}\n JWT.encode exp_payload, secret, 'HS512'\n end",
"def authenticate_app\n payload = {\n # The time that this JWT was issued, _i.e._ now.\n iat: Time.now.to_i,\n\n # JWT expiration time (10 minute maximum)\n exp: Time.now.to_i + (10 * 60),\n\n # Your GitHub App's identifier number\n iss: APP_IDENTIFIER\n }\n logger.debug \"JWT payload: #{payload}\"\n\n # Cryptographically sign the JWT.\n jwt = JWT.encode(payload, PRIVATE_KEY, 'RS256')\n\n # Create the Octokit client, using the JWT as the auth token.\n @app_client ||= Octokit::Client.new(bearer_token: jwt)\n end",
"def encode_token(payload)\n # I expect something like payload => { userid: int }\n JWT.encode(payload, Rails.application.secrets.secret_key_base)\n end",
"def encode_token(payload)\n JWT.encode(payload, \"secret\")\n end",
"def encode(payload)\n payload.reverse_merge!(meta)\n JWT.encode(payload, HMAC_SECRET)\n end",
"def token_generate\n res = call('auth.token_generate')\n\n return unless res || res['token']\n\n res['token']\n end",
"def encode_token(payload, exp = 24.hours.from_now)\n payload[:exp] = exp.to_i\n JWT.encode(payload, SECRET_KEY) \n end",
"def encode_token(payload)\n JWT.encode(payload, 'secret')\n end",
"def encode_token(payload)\n #PAYLOAD => {salad: 'tomatoes'}\n JWT.encode(payload, ENV[\"JWT_SECRET\"])\n #jwt string: 'hdjgjdkgjgjsetc...'\n end",
"def token\n HVCrypto::JWT.encode(self[:token], audience)\n end",
"def original_request_jwt\n env['grape_jwt_auth.original_token']\n end",
"def jwt_auth_header\n { Authorization: \"Bearer #{Quovo::Token.get}\" }\n end",
"def public_jwk\n JWT::JWK.create_from(public_key)\n end",
"def secret_key\n jwt_config['secret_key']\n end",
"def encode(payload)\n JWT.encode(payload, secret_key, 'HS512')\n end",
"def jwt_payload\n @jwt_payload ||= request.env['JWT_PAYLOAD']\n end",
"def generate_authentication_token\n self.auth_token = User.new_token\n\t\t\tself.auth_expires_at = Time.now + 240.hours\n\tend",
"def encode(payload, exp = 24.hours.from_now)\n payload[:exp] = exp.to_i\n JWT.encode(payload, private_key, ALGORITHM)\n end",
"def get_keys(jwt)\n\t\tp 'Setting the keys'\n\t\t@decodedJWT = JWT.decode(jwt.to_s,nil,settings.appSig)\n\t\t@jwtJSON = JSON.parse(@decodedJWT.to_json) \n\t\tsettings.oauthToken = @jwtJSON[\"request\"][\"user\"][\"oauthToken\"]\n\t\tsettings.internalOauthToken = @jwtJSON[\"request\"][\"user\"][\"internalOauthToken\"]\n\t\tsettings.refreshToken = @jwtJSON[\"request\"][\"user\"][\"refreshToken\"]\n\t\tsettings.jwt = jwt\n\t\tsettings.exp = @jwtJSON[\"exp\"]\nend",
"def generate_and_set_token(user=nil)\n user ||= create(:user_with_token)\n current_time = Time.now\n jwt_config = Rails.application.secrets.jwt\n token = JWT.encode({\"iat\" => current_time.to_i,\n \"iss\" => jwt_config['issuer'],\n \"exp\" => (current_time + 14.days).to_i,\n \"user_id\" => user.id},\n jwt_config['secret_key'],\n jwt_config['hmac_sha_algo'])\n request.headers['Authorization'] = \"Bearer #{token}\"\n end",
"def encode\n raise RuntimeError, 'secret key is not set' unless @secret\n\n payload = @claims.merge({\n iss: issuer,\n aud: audience,\n sub: user_uid,\n perms: permissions.map do |perm|\n # encode each set of permission into an array\n # first element - resource\n # remaining elements - actions granted\n [perm[:resource]].push(*perm[:actions])\n end\n })\n\n payload[:exp] = expiration.to_i if expiration\n\n JWT.encode payload, @secret, ALGORITHM\n end",
"def jwt_encode(payload)\n JWT.encode(payload, Config.jwt_encode_key, Config.jwt_algorithm)\n end",
"def generate_authentication_token\n SecureRandom.hex(8)\n end",
"def auth_token(payload = nil)\n if payload.nil?\n payload = {data: {authenticated: true, user: current_user_data}}\n end\n\n EchoCommon::Services::Jwt.encode payload\n end",
"def generate!\n raise ArgumentMissingError unless has_all_required_inputs?\n payload = Base64.urlsafe_encode64(data.to_json).gsub(/[\\=\\n]/, '')\n signature = Base64.urlsafe_encode64(\n OpenSSL::HMAC.digest(\n OpenSSL::Digest.new('sha256'),\n secret,\n HEADER + '.' + payload\n )\n ).strip.gsub(/[\\=\\n]/, '')\n [HEADER, payload, signature].join('.')\n end",
"def encode_token(playload)\n JWT.encode(playload, \"Git-Pet\")\n end",
"def generate_authorization_token\n \t# create the token that contains the necessary elements to authorize the user\t\n \t# using a nested array because the alphabetical order must be maintained\n \ttoken = [['credentials', self.user.to_credential_string,], ['identity', self.user.to_identity_string], ['time', Time.now.to_i.to_s]]\n \tencoded_parms = token.collect {|pair| pair[1] = CGI.escape(pair[1]); pair.join('=')}.join('&')\n\n digest = Digest::SHA2.new\n digest.update(encoded_parms)\n\n hmac = HMAC::SHA256.new(self.options[:shared_secret])\n hmac.update(encoded_parms)\n\n # add the hashed digital signature to the end of the query parameters\n encoded_parms += \"&signature=#{hmac.hexdigest}\"\n end",
"def jwt_get_signature url\n\n payload=JWT.encode({ #Payload\n key: \"master\",\n method: \"GET\",\n path: url,\n },\n \"badgemaster\", #Secret\n \"HS256\", #Algoritmo\n {typ: \"JWT\", alg:\"HS256\"} #Headers\n )\n\n end",
"def authentication_token\n generate_token(:authentication_token)\n end",
"def client_key_authentication\n if request.headers['JWT'].present?\n auth_header = request.headers['JWT'].split(' ').last\n @token_payload = decodeJWT auth_header.strip\n if @token_payload\n @creators_id = @token_payload[0]['creators_id']\n else\n selected_format({ error: 'The provided token wasn´t correct' },:bad_request)\n end\n else\n selected_format({ error: 'Need to include the Authorization header JWT with correct key' },:forbidden)\n end\n end",
"def generate_api_key\n digest = Digest::SHA2.new(512)\n # generate a unique token\n unique_seed = SecureRandom.hex(20)\n digest.update(\"#{unique_seed} SECRET! #{Time.current.to_f}\").to_s\n end",
"def generate_api_key\n digest = Digest::SHA2.new(512)\n # generate a unique token\n unique_seed = SecureRandom.hex(20)\n digest.update(\"#{unique_seed} SECRET! #{Time.current.to_f}\").to_s\n end",
"def as_jwt_payload\n options = SolidusJwt::Config.jwt_options\n claims = { sub: id }\n\n as_json(options)\n .merge(claims)\n .as_json\n end",
"def jwt_authenticator\n @jwt_authenticator ||=\n Peatio::Auth::JWTAuthenticator.new(Rails.configuration.jwt_public_key)\n end",
"def token(expiration=nil)\n expiration ||= 1\n payload = {\n data: {\n id: id,\n discriminator: password_digest\n # discriminator used to detect password changes after token generation\n },\n exp: Time.now.to_i + expiration * 60 * 60\n }\n # HMAC using SHA-512 algorithm\n JWT.encode payload, User.hmac_key, 'HS512'\n end",
"def make_token\r\n # From the restful-authentication plug-in\r\n args = [ Time.now, (1..10).map{ rand.to_s } ]\r\n Digest::SHA1.hexdigest(args.flatten.join('--'))\r\n end",
"def jwt_authenticator\n @jwt_authenticator ||=\n Peatio::Auth::JWTAuthenticator.new(Rails.configuration.x.jwt_public_key)\n end"
] | [
"0.770027",
"0.76739204",
"0.75817466",
"0.7255995",
"0.7002499",
"0.6947139",
"0.6928979",
"0.68599576",
"0.68227065",
"0.6818304",
"0.68032545",
"0.67955744",
"0.6738663",
"0.6720331",
"0.6719839",
"0.6706644",
"0.6667884",
"0.66223526",
"0.6599878",
"0.6583407",
"0.6580559",
"0.65610284",
"0.6559345",
"0.6537756",
"0.65305597",
"0.6509304",
"0.64970917",
"0.64877045",
"0.6484663",
"0.6462577",
"0.64445937",
"0.644347",
"0.6403412",
"0.64021325",
"0.64021325",
"0.63737506",
"0.63595235",
"0.63500524",
"0.6349964",
"0.634872",
"0.6340543",
"0.63387966",
"0.63365984",
"0.6322414",
"0.63158846",
"0.63088316",
"0.6299652",
"0.62963116",
"0.6294731",
"0.6290634",
"0.6289057",
"0.6282914",
"0.62675625",
"0.6266153",
"0.62594473",
"0.6252692",
"0.62371945",
"0.62371945",
"0.62224966",
"0.62202436",
"0.6213982",
"0.6213982",
"0.620293",
"0.6196883",
"0.61741614",
"0.61156756",
"0.61109567",
"0.60989475",
"0.6092022",
"0.6081132",
"0.608048",
"0.6077604",
"0.6074172",
"0.60724324",
"0.6071921",
"0.6070418",
"0.6043433",
"0.6038568",
"0.60067725",
"0.59664273",
"0.5918881",
"0.59186065",
"0.59056884",
"0.58773404",
"0.5869922",
"0.58692515",
"0.5867214",
"0.5845619",
"0.58126307",
"0.581229",
"0.5805961",
"0.57892233",
"0.5785882",
"0.57850444",
"0.57850444",
"0.5780836",
"0.57727295",
"0.576485",
"0.5746585",
"0.5733236"
] | 0.75237906 | 3 |
Gets the attachments property value. Read-only. Nullable. Supports $expand. | def attachments
return @attachments
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def attachment\n @attachment\n end",
"def attachments\n @attachments\n end",
"def attachments\n mail&.attachments || []\n end",
"def attachments\n parts.attachments\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments\n Easybill::Api::Attachments\n end",
"def get_attachment(name)\n\n unless (model.properties.has_property?(:attachments) &&\n model.properties[:attachments].type == DataMapper::Types::JsonObject &&\n model.properties[:attachments].field == :_attachments)\n raise ArgumentError, \"Attachments require ' property :attachments, JsonObject, :field => :_attachments'\"\n end\n\n unless self.id && self.attachments && self.attachments[name]\n nil\n else\n http = Net::HTTP.new(repository.adapter.uri.host, repository.adapter.uri.port)\n uri = attachment_path(name)\n response, data = http.get(uri, { 'Content-Type' => self.attachments[name]['content_type'] })\n\n unless response.kind_of?(Net::HTTPSuccess)\n nil\n else\n data\n end\n end\n\n end",
"def attachments\n @attachments ||= []\n end",
"def attachments\n @_message.attachments\n end",
"def attachments\n @attachments ||= begin\n return message.attachments unless message.attachments.empty?\n if full_text_part.nil? && full_html_part.nil?\n [ message ]\n else\n []\n end\n end\n end",
"def attachment(key)\n # Use to_a.find to take advantage of the eager-loaded attachments and blobs.\n attachments.to_a.find { |a| a.key == key && a.file&.attached? }\n end",
"def record\n attachment.record\n end",
"def attachment(name)\n name = name.to_s\n return attachments[name][:file] if @attachments.try(:[], :name).try(:[], :file)\n begin\n result = retrieve_attachment(name)\n @attachments = (@attachments || {}).merge(name => {:file => result[:file], :dirty => false, :content_type => result[:content_type]})\n result[:file]\n rescue RestClient::ResourceNotFound\n nil\n end\n end",
"def attachments_permission\n @attributes[:attachments_permission]\n end",
"def attachments_permission\n @attributes[:attachments_permission]\n end",
"def attachment\n object.send(attribute_name)\n end",
"def attachments_metadata\n @attachments_metadata || {}\n end",
"def get_attachment(name)\n assert_attachments_property\n\n attachment = self.attachments[name] if self.attachments\n\n unless self.id && attachment\n nil\n else\n adapter = repository.adapter\n http = Net::HTTP.new(adapter.uri.host, adapter.uri.port)\n uri = Addressable::URI.encode_component(attachment_path(name))\n response, data = http.get(uri, 'Content-Type' => attachment['content_type'])\n\n unless response.kind_of?(Net::HTTPSuccess)\n nil\n else\n data\n end\n end\n\n end",
"def attachments=(value)\n @attachments = Array.new\n unless value.nil? || value.empty?\n value.each do |v1|\n if v1.instance_of? AttachmentJson\n @attachments.push(v1)\n end\n end\n end\n end",
"def attachments\n @attachments ||= ActiveStorage::Attachment.where(record_gid: record.to_gid.to_s, name: name)\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachment_data\n @attachment_url && @client.get_file(@attachment_url)\n end",
"def attachment_representation\n parametrized_attachment.tap do |attachment|\n %w(filename target_directory max_file_size).each do |attr|\n attachment.public_send(\"#{attr}=\", public_send(attr))\n end\n attachment.readonly!\n end\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def get_attachment_leaves\n return _get_attachment_leaves_recursive(self.mail)\n end",
"def attachments\n (msg['attachments']||{}).map{|attached| Mandrill::WebHook::Attachment[attached.last] }\n end",
"def attachment_path\n return unless attachment?\n\n case attachment.options[:storage]\n when :filesystem then attachment.path\n else attachment.url\n end\n end",
"def attachment_name\n @name\n end",
"def get_attachment_url\n return self.attachment.url unless self.attachment.blank?\n end",
"def get_attachment(db, locale, version, attachment_name, ttl=settings.long_ttl)\n id = get_topic_or_image_id(locale, version, attachment_name)\n return get_from_db_or_cache(db, id, {:trx => CouchDBTransaction::FETCH_ATTACHMENT, :attachment_name => attachment_name}, ttl)\n end",
"def attachments\n if attachment_ids\n attachment_ids.map {|id| Attachment.find(id)}\n else\n []\n end\n end",
"def attachment\n mail.attachments.map do |attachment|\n blob = ActiveStorage::Blob.create_after_upload!(\n io: StringIO.new(attachment.decoded),\n filename: attachment.filename,\n content_type: attachment.content_type\n )\n return blob\n end\n end",
"def attachments\n process_message_body if !@attachments\n @attachments\n end",
"def provision_attachments_permission\n @attributes[:provision_attachments_permission]\n end",
"def attached(attclass)\n attachments[attclass.to_s.classify]\n end",
"def attachment_path\n return unless attachment.attached?\n\n attachment.service_url\n end",
"def filename\n find_attachment\n end",
"def value\r\n @value ||= self.class.service_instance.get_blob(path)\r\n end",
"def attachments\n model_documents = self.model_documents.reverse\n end",
"def attachment( document_id, attachment_id )\n new( :id => document_id ).attachments.get!( attachment_id )\n end",
"def attached?\n attachment.present?\n end",
"def get_attach_size\n\n return 0 if self.mail_attachments.nil? or self.mail_attachments.empty?\n\n sum = 0\n\n self.mail_attachments.each do |attach|\n sum += attach.size\n end\n\n return sum\n end",
"def name\n attachment.name\n end",
"def temporary_attachments\n self.class.read_inheritable_attribute(:temporary_attachments) || []\n end",
"def attached_file\n=begin # TODO: remove when :json resolved\n file || (file_attacher.load_data(file_data) if file_data.present?)\n=end\n file || file_attacher_load\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def attachment\n record.public_send(\"#{name}_file\")\n end",
"def get_attachment attachment_index\n begin\n \n if @filename == ''\n raise 'filename not specified'\n end\n \n if attachment_index == ''\n raise 'attachment index not specified'\n end\n \n \n str_uri = $product_uri + '/pdf/' + @filename + '/attachments/' + attachment_index.to_s\n str_signed_uri = Aspose::Cloud::Common::Utils.sign(str_uri) \n response_stream = RestClient.get(str_signed_uri, {:accept=>'application/json'})\n \n stream_hash = JSON.parse(response_stream)\n \n return stream_hash['Attachment']\n \n \n rescue Exception=>e\n print e\n end\n end",
"def attached?\n attachments.any?\n end",
"def attachments_for_export\n []\n end",
"def show\n @attachment = @specification.attachment\n end",
"def client_attachments\n self.messages.where(location_submitted: Message::CLIENT_EMAIL_LOCATIONS).collect { |message| message.attachments }.flatten\n end",
"def attachments?\n !attachments.empty?\n end",
"def set_AttachmentsWithData(value)\n set_input(\"AttachmentsWithData\", value)\n end",
"def media\n return @media\n end",
"def attachment\n participant.attachment(rubric_item.filename)\n end",
"def get_attachment_text_clipped\n if self.cached_attachment_text_clipped.nil?\n # As side effect, get_attachment_text_full makes snippet text\n attachment_text = self.get_attachment_text_full\n raise \"internal error\" if self.cached_attachment_text_clipped.nil?\n end\n\n return self.cached_attachment_text_clipped\n end",
"def attachment?\n !!find_attachment\n end",
"def retrieve_attachment(name)\n { :file => RestClient.get(\"#{ uri }/#{ CGI.escape(name) }\"),\n :content_type => JSON.parse(RestClient.get(uri))['_attachments']['name'] }\n end",
"def attachment?\n attachment.present? && attachment.readable?\n end",
"def device\n attachments[0]['device'] if has_attachments?\n end",
"def file_details\n return @file_details\n end",
"def attachments\n eco_documents = self.eco_documents.reverse\n eco_documents.delete_if { |d| d.specification? }.reverse\n end",
"def show\n email = Gmailer.gmail.inbox.find(:all).find {|e| e.msg_id == @message.message_id.to_i } #convert due to wrong type\n @attachments = email ? email.message.attachments : []\n end",
"def get_attachment_text_full\n text = self._get_attachment_text_internal\n self.mask_special_emails!(text)\n self.remove_privacy_sensitive_things!(text)\n # This can be useful for memory debugging\n #STDOUT.puts 'xxx '+ MySociety::DebugHelpers::allocated_string_size_around_gc\n \n # Save clipped version for snippets\n if self.cached_attachment_text_clipped.nil?\n self.cached_attachment_text_clipped = text[0..MAX_ATTACHMENT_TEXT_CLIPPED]\n self.save!\n end\n \n return text\n end",
"def get_volume_attachment_hrefs\n attachments = @api_client.volume_attachments.index(filter: [\"instance_href==#{get_instance_href}\"])\n\n # Reject following attachments:\n # - attachments whose device parameter is set to 'unknown'\n # - gce boot disk - shown by resource_uid of /disks/boot-*\n # - aws ebs boot disk - shown by device_id as '/dev/sda1'\n # - cloudstack boot disk - shown by device_id as 'device_id:0'\n attachments.reject! do |attachment|\n attachment.device == 'unknown' ||\n attachment.resource_uid =~ %r{\\/disks\\/boot-} ||\n (node['cloud']['provider'] == 'ec2' && attachment.device_id == '/dev/sda1') ||\n (node['cloud']['provider'] == 'cloudstack' && attachment.device_id == 'device_id:0')\n end\n\n attachments.map(&:href)\n end",
"def uploaded_filename_without_extension\n attachment.try(:original_filename_without_extension)\n end",
"def has_attachments\n preneed_attachments.present?\n end",
"def value\n attributes['FieldValue']\n end",
"def extension_item\n return @children['extension-item'][:value]\n end",
"def attachment_urls\n @list.when_list do\n result = call(\"Lists\", \"get_attachment_collection\", \"listName\" => @list.id, \"listItemID\" => @id)\n return result.xpath(\"//sp:Attachment\", NS).map { |att| att.text }\n end\n @list.when_document_library { raise TypeError, \"a document library does not support attachments\" }\n @list.raise_on_unknown_type\n end",
"def avatar_file\n @attributes[:avatar_file]\n end",
"def get(\n id,\n deadline: nil\n )\n return @account_attachments.get(\n id,\n deadline: deadline,\n )\n end",
"def file_as_body\n @attributes[:file_as_body]\n end",
"def mime_part\n message.mime_part\n end",
"def attachments\n if File.exists?(attachment_dir)\n return Dir.glob(File.join(attachment_dir, '*')).map { |f| Attachment.new(f, intname) }\n else\n false\n end\n end",
"def show\n @file_upload_attachments = @file_upload.file_upload_attachments.all\n end",
"def active_storage_param(relation)\n if relation.has_one?\n relation.name.to_s.gsub(/(_attachment)$/, '').to_sym\n else\n { relation.name.to_s.gsub(/(_attachments)$/, '').to_sym => [] }\n end\n end",
"def file_attachments_path\n ENV['IMPORT_PATH'] || '/opt/data'\n end",
"def attachment_definitions\n read_inheritable_attribute(:attachment_definitions)\n end",
"def image_url\n product.images.first.try(:attachment).try(:url)\n end",
"def get_value(property_path)\n element = @tree.select(property_path)[0]\n element.nil? ? nil : element.value\n end",
"def fullname\n \"#{self.attached_file_identifier}\"\n end",
"def file\n @files.first ? @files.first[0] : nil\n end",
"def file\n @files.first ? @files.first[0] : nil\n end",
"def set_attachment\n @project = Project.find(params[:project_id])\n @attachment = @project.attachments.find(params[:id])\n @attachment.owner\n end",
"def has_attachments?\n !(attachments.nil? || attachments.empty? || attachments[0].empty?)\n end"
] | [
"0.6709699",
"0.6701456",
"0.6624822",
"0.6510028",
"0.6489727",
"0.6489727",
"0.6489727",
"0.62525403",
"0.6244717",
"0.6181477",
"0.6153343",
"0.61388344",
"0.60685533",
"0.5976693",
"0.5935467",
"0.59322715",
"0.59322715",
"0.5931048",
"0.592182",
"0.5915424",
"0.5894746",
"0.58776224",
"0.5780023",
"0.5780023",
"0.5780023",
"0.5780023",
"0.575681",
"0.57213205",
"0.57010305",
"0.56800103",
"0.56542194",
"0.56542194",
"0.56542194",
"0.56542194",
"0.5653875",
"0.56155765",
"0.5605668",
"0.5595747",
"0.55723816",
"0.5571808",
"0.55380815",
"0.55247766",
"0.55096805",
"0.5491395",
"0.5398948",
"0.5376672",
"0.537407",
"0.53523386",
"0.5324795",
"0.5313223",
"0.5293909",
"0.5280462",
"0.52779114",
"0.52686936",
"0.52653456",
"0.52342504",
"0.52342504",
"0.5220562",
"0.51912665",
"0.5187551",
"0.51080906",
"0.51025",
"0.5099285",
"0.50908065",
"0.50888133",
"0.50712913",
"0.50633043",
"0.5058793",
"0.505477",
"0.50492007",
"0.5040579",
"0.5040479",
"0.503665",
"0.50138336",
"0.49886113",
"0.49763262",
"0.4972463",
"0.49684736",
"0.49624273",
"0.49474153",
"0.4942914",
"0.49428564",
"0.49396953",
"0.49369618",
"0.49288014",
"0.49268234",
"0.49107856",
"0.4903049",
"0.49019688",
"0.48866844",
"0.4881166",
"0.48754144",
"0.48648885",
"0.48621312",
"0.4851841",
"0.4851841",
"0.4850876",
"0.48382273"
] | 0.6567483 | 5 |
Sets the attachments property value. Readonly. Nullable. Supports $expand. | def attachments=(value)
@attachments = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def attachments=(value)\n @attachments = Array.new\n unless value.nil? || value.empty?\n value.each do |v1|\n if v1.instance_of? AttachmentJson\n @attachments.push(v1)\n end\n end\n end\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def set_AttachmentsWithData(value)\n set_input(\"AttachmentsWithData\", value)\n end",
"def attachment=(params)\n super params\n import_attachment if persisted? && attachment.try(:valid?)\n end",
"def attachments=(attachments)\n attachments.each do |attachment|\n assets.build(attachment)\n end\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def update!(**args)\n @attachments = args[:attachments] if args.key?(:attachments)\n end",
"def update!(**args)\n @attachments = args[:attachments] if args.key?(:attachments)\n end",
"def set_attachment\n @attachment = Attachment.find(params[:id])\n end",
"def set_attachfile\n @attachfile = Attachfile.find(params[:id])\n end",
"def set_attachment\n @project = Project.find(params[:project_id])\n @attachment = @project.attachments.find(params[:id])\n @attachment.owner\n end",
"def set_embedded_attachment\n @embedded_attachment = EmbeddedAttachment.find(params[:id])\n end",
"def add_attachments(value)\n if value.instance_of? AttachmentJson\n @attachments.push(value)\n end\n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def set_media_attachment\r\n @media_attachment = MediaAttachment.find(params[:id])\r\n end",
"def set_attachment\n @attachment = Programs::Attachment.find(params[:id])\n end",
"def imported_attachment=(attachment)\n self.attachment = attachment\n clear_attachment_change\n end",
"def uploaded_attachments=(attachments)\n\t\tattachments.each do |attach|\n\t\t\tif attach && attach.size > 0\n\t\t\t\tattachment = Clip.new\n\t\t\t\tattachment.uploaded_data = attach\n\t\t\t\tself.clips << attachment\n\t\t\tend\n\t\tend\n\tend",
"def set_file_upload_attachment\n @file_upload_attachment = FileUploadAttachment.find(params[:id])\n end",
"def set_property_attachment\n @property_attachment = PropertyAttachment.find(params[:id])\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n @attachments ||= []\n end",
"def set_order_attachment\n @order_attachment = OrderAttachment.find(params[:id])\n end",
"def set_user_attachment\n @user_attachment = UserAttachment.find(params[:id])\n end",
"def forward_as_attachment_to=(value)\n @forward_as_attachment_to = value\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def set_size\n self.size = attachment.size if attachment.size\n end",
"def attachment_content_type=(value)\n self.content_type=value\n end",
"def set_product_attachment\n @product_attachment = ProductAttachment.find(params[:id])\n end",
"def default_image_file=(file)\n self.attachments.build(file: file, role: 'default_image')\n end",
"def set_object_attachment\n @object_attachment = ObjectAttachment.find(params[:id])\n end",
"def update!(**args)\n @attachment_data_ref = args[:attachment_data_ref] if args.key?(:attachment_data_ref)\n end",
"def set_product_attachment_file\n @product_attachment_file = ProductAttachmentFile.find(params[:id])\n end",
"def permitted_attachments_params\n params.permit(attachments: [])\n end",
"def swfupload_file=(data)\n self.photo_attach = data\n end",
"def set_friends_attachment\n @friends_attachment = FriendsAttachment.find(params[:id])\n end",
"def set_pub_attachment\n @pub_attachment = PubAttachment.find(params[:id])\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def attachments\n mail&.attachments || []\n end",
"def update!(**args)\n @add_attachments = args[:add_attachments] if args.key?(:add_attachments)\n end",
"def attachment_name=(name)\n @attachment_name = name\n end",
"def attachment_params\n params.fetch(:attachment).permit(:file)\n end",
"def set_attached_image\n @attached_image = AttachedImage.find(params[:id])\n end",
"def set_attached_image\n @attached_image = AttachedImage.find(params[:id])\n end",
"def attachment_name=(name)\n @attachment_name = name\n end",
"def new_file_attachment_attributes=(file_attachment_attributes)\n file_attachment_attributes.each do |attributes|\n file_attachments.build(attributes) unless attributes[\"attachment\"].blank?\n end\n end",
"def attachment\n @attachment\n end",
"def set_travel_post_attachment\n @travel_post_attachment = TravelPostAttachment.find(params[:id])\n end",
"def set_design_image_attachment\n @design_image_attachment = DesignImageAttachment.find(params[:id])\n end",
"def default_image_file=(file)\n attachments.build(file: file, role: 'default_image')\n end",
"def update!(**args)\n @attachments = args[:attachments] if args.key?(:attachments)\n @timestamp = args[:timestamp] if args.key?(:timestamp)\n @value = args[:value] if args.key?(:value)\n end",
"def set_attached_file\n @attached_file = AttachedFile.find(params[:id])\n end",
"def file=(file)\n self.name = filename(file)\n self.attachment = Attachment.find_or_initialize_by(file: file)\n end",
"def attachment_params\n params.require(:attachment).permit( :attachment)\n end",
"def set_deposit_attachment\n @deposit_attachment = DepositAttachment.find(params[:id])\n end",
"def set_portfolio_item_attachment\n @portfolio_item_attachment = PortfolioItemAttachment.find(params[:id])\n end",
"def set_gallery_attachment\n @gallery_attachment = GalleryAttachment.find(params[:id])\n end",
"def set_cms_attachment\n @cms_attachment = Cms::Attachment.find(params[:id])\n end",
"def set_note_attachment\n @note_attachment = NoteAttachment.find(params[:id])\n end",
"def set_armor_attachment\n @armor_attachment = ArmorAttachment.find(params[:id])\n end",
"def set_pdf_attachment\n @pdf_attachment = PdfAttachment.find(params[:id])\n end",
"def connect_attachments(params)\n json_attachments = params['data'].try(:[], 'relationships').try(:[], 'attachments')\n return if json_attachments.blank?\n\n attachment_list = resource_list('attachment', json_attachments)\n\n attachment_list.each do |attachment|\n attachment.attachable = self\n attachment.save\n end\n end",
"def attachment_params\n params.fetch(:attachment, {}).permit(:file_name, :attachment, :report_id)\n end",
"def attachments\n @attachments\n end",
"def attachment_params\n params.require(:attachment).permit(:file_type, :attachment, :parent_id)\n end",
"def attachment_representation\n parametrized_attachment.tap do |attachment|\n %w(filename target_directory max_file_size).each do |attr|\n attachment.public_send(\"#{attr}=\", public_send(attr))\n end\n attachment.readonly!\n end\n end",
"def attachment(name)\n name = name.to_s\n return attachments[name][:file] if @attachments.try(:[], :name).try(:[], :file)\n begin\n result = retrieve_attachment(name)\n @attachments = (@attachments || {}).merge(name => {:file => result[:file], :dirty => false, :content_type => result[:content_type]})\n result[:file]\n rescue RestClient::ResourceNotFound\n nil\n end\n end",
"def attachments\n Easybill::Api::Attachments\n end",
"def save_attachments attachments\n Attachment.add(attachments, self)\n end",
"def set_cms_attachment\n @cms_attachment = current_portal.cms_attachments.find(params[:id])\n end",
"def attachment(key)\n # Use to_a.find to take advantage of the eager-loaded attachments and blobs.\n attachments.to_a.find { |a| a.key == key && a.file&.attached? }\n end",
"def set_incidentattachment\n @incidentattachment = Incidentattachment.find(params[:id])\n end",
"def attachments\n parts.attachments\n end",
"def set_link_attachment\n @link_attachment = LinkAttachment.find(params[:id])\n end",
"def of_update_attachment\n if !$attachment.nil?\n $attachment.destroy\n $attachment = Attachment.new\n end\n $attachment_changed = true\n $attachment.avatar = params[:file]\n $attachment.id = 1\n $attachment.save!\n if $attachment.save\n render json: { \"image\" => $attachment.avatar }\n else\n render json: { \"image\" => \"\" }\n end\n end",
"def ti_update_attachment\n if !$attachment.nil?\n $attachment.destroy\n $attachment = Attachment.new\n end\n $attachment.avatar = params[:file]\n $attachment.id = 1\n $attachment.save!\n if $attachment.save\n render json: { \"image\" => $attachment.avatar }\n else\n render json: { \"image\" => \"\" }\n end\n end",
"def set_work_order_attachment\n @work_order_attachment = WorkOrderAttachment.find(params[:id])\n end",
"def update!(**args)\n @addon_attachments = args[:addon_attachments] if args.key?(:addon_attachments)\n end",
"def attachment_params\n params.require(:attachment).permit(:file)\n end",
"def attachment_params\n params.require(:attachment).permit(:file)\n end",
"def attachment( document_id, attachment_id )\n new( :id => document_id ).attachments.get!( attachment_id )\n end",
"def set_event_attachment\n @event_attachment = EventAttachment.find(params[:id])\n end",
"def set_pin_attachment\n @pin_attachment = PinAttachment.find(params[:id])\n end",
"def attachments\n @attachments ||= ActiveStorage::Attachment.where(record_gid: record.to_gid.to_s, name: name)\n end"
] | [
"0.7079706",
"0.69901305",
"0.69901305",
"0.69901305",
"0.69901305",
"0.69037825",
"0.6379742",
"0.62929124",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6069635",
"0.6022422",
"0.6022422",
"0.602212",
"0.59902143",
"0.5975776",
"0.59535885",
"0.5927472",
"0.59183264",
"0.59183264",
"0.59183264",
"0.58948797",
"0.5854964",
"0.58304614",
"0.57950413",
"0.57918906",
"0.5782019",
"0.5750469",
"0.57443535",
"0.5719275",
"0.5621024",
"0.5618892",
"0.55998826",
"0.5589058",
"0.5589058",
"0.55739236",
"0.55717224",
"0.55593675",
"0.5558617",
"0.5547166",
"0.5529713",
"0.550119",
"0.5501165",
"0.5494716",
"0.5487665",
"0.5485364",
"0.5442793",
"0.543222",
"0.54260916",
"0.54247564",
"0.542267",
"0.54210716",
"0.54210716",
"0.5407182",
"0.54027486",
"0.53697556",
"0.5368307",
"0.5354393",
"0.53511196",
"0.53449607",
"0.53329635",
"0.53167135",
"0.5311105",
"0.52983826",
"0.52886397",
"0.5277765",
"0.52637684",
"0.52604735",
"0.52507687",
"0.5239701",
"0.52228206",
"0.5217854",
"0.52133137",
"0.52023643",
"0.5193458",
"0.5187229",
"0.5182609",
"0.51791984",
"0.5175031",
"0.51747257",
"0.5153877",
"0.5153854",
"0.513125",
"0.5128365",
"0.5126854",
"0.5124253",
"0.51074183",
"0.51059896",
"0.51059896",
"0.50997955",
"0.5099779",
"0.50967884",
"0.50891197"
] | 0.8004808 | 2 |
Gets the body property value. The contents of the post. This is a default property. This property can be null. | def body
return @body
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getBody\n @body\n end",
"def body\n self[:body]\n end",
"def body\n read_attribute(:body) || ''\n end",
"def body\n @body || \"\"\n end",
"def body\n data[:body]\n end",
"def body\n if defined? @body\n @body\n else\n Null\n end\n end",
"def body\n @body ||= @request.body.read\n end",
"def body\n self['body']\n end",
"def body\n @attributes[:body]\n end",
"def body\n return @body\n end",
"def body\n self.read_body\n self.get_body\n end",
"def body(value = nil)\n if value\n @body = value\n else\n @body\n end\n end",
"def body\n process_message_body if !@body\n @body\n end",
"def body\n @body ||= process_text(@raw_body)\n end",
"def body=(value)\n @body = value\n end",
"def body=(value)\n @body = value\n end",
"def body=(value)\n @body = value\n end",
"def body=(value)\n @body = value\n end",
"def body=(value)\n @body = value\n end",
"def body\n if @parsed_body\n parsed_body.to_s\n else\n @body\n end\n end",
"def body(value = nil)\n\t\t\tvalue ? @body = value : @body ||= ''\n\t\tend",
"def body=(value)\n @body = value\n end",
"def body\n @message.body\n end",
"def body\n remote[:body] || reload.remote[:body]\n end",
"def body(value = nil)\n if value\n self.body = value\n# add_encoding_to_body\n else\n process_body_raw if @body_raw\n @body\n end\n end",
"def body\n ret = read_attribute(:body)\n if ret.nil?\n return ret\n end\n ret = ret.strip\n ret = ret.gsub(/(?:\\n\\s*){2,}/, \"\\n\\n\") # remove excess linebreaks that unnecessarily space it out\n ret\n end",
"def body\n fetch\n @body\n end",
"def getBody()\n return @req.body\n end",
"def body_raw\n @body_raw ||= body(true).read\n @body_raw\n end",
"def raw_body\n @attributes[:raw_body]\n end",
"def body\n @body ||= (\n JSON.parse(response.body) if response.body.length > 0\n )\n end",
"def parsed_body\n @body\n end",
"def body\n Base64::decode64(self[:body])\n end",
"def body\n @body ||= reader_doc.content(true).strip\n end",
"def body body=nil\n @response.body = body if body\n @response.body\n end",
"def text\n @body\n end",
"def body\n if @http_response != nil \n @body = @http_response.body\n end\n @body\n end",
"def get_body\n request_object.body\n end",
"def body\n response.body || ''\n end",
"def body\n @raw\n end",
"def body\n if raw_post = @env['RAW_POST_DATA']\n raw_post.force_encoding(Encoding::BINARY) if raw_post.respond_to?(:force_encoding)\n StringIO.new(raw_post)\n else\n @env['rack.input']\n end\n end",
"def set_body\n self.body = \"\" unless body\n body\n end",
"def set_body\n self.body = \"\" unless body\n body\n end",
"def body\n @body ||= Page.convert_raw_to_html(raw_body)\n end",
"def body(new = nil)\n if new.nil?\n @body\n else\n with(body: body.merge(new))\n end\n end",
"def text\n body.text\n end",
"def body\n nil\n end",
"def body\n part('body')\n end",
"def article_body\n respond_to?(:body) ? body : ''\n end",
"def body\n @binding&.body || \"\"\n end",
"def body\n @body_io.read.tap { @body_io.rewind }\n end",
"def body(request)\n request.body.read\n end",
"def body\n response&.body.to_s\n end",
"def text\n self.body\n end",
"def body\n @body ||= begin\n begin\n io = Java.java_to_ruby(\n org.jruby.RubyIO.new(JRuby.runtime, entity.content).java_object\n )\n io.read\n rescue Exception => e\n puts \"Error in Response#body: #{e.message}\"\n end\n end\n end",
"def plain_body\n mail&.plain_body\n end",
"def read_body(env)\n env[:body] = env[:body].read if env[:body].respond_to? :read\n end",
"def body\n @note.content\n end",
"def html_body\n self[:html_body]\n end",
"def original_body\n @original_body\n end",
"def body\n @response.body\n end",
"def body\n @response.body\n end",
"def read\n\t\t\t\t\t@body.read\n\t\t\t\tend",
"def body=(newval)\n if self[:body] != newval\n # changed\n self[:body] = newval\n end\n self[:body]\n end",
"def body\n response.body\n end",
"def body\n @http_response.body\n end",
"def body\n @body ||= begin\n body = \"\"\n while chunk = readpartial\n body << chunk\n end\n body\n end\n end",
"def body(path)\n get(path).body\n end",
"def body\n JSON.parse(@response.body) unless @response.body.empty?\n end",
"def body\n JSON.parse(@response.body) unless @response.body.empty?\n end",
"def body\r\nif raw_post = @env['RAW_POST_DATA']\r\nraw_post.force_encoding(Encoding::BINARY)\r\nStringIO.new(raw_post)\r\nelse\r\n@env['rack.input']\r\nend\r\nend",
"def body\n \"\"\n end",
"def body\n \"\"\n end",
"def body\n ''\n end",
"def body(*args)\n return @body if args.empty?\n @body = args.first\n end",
"def body\n @chunk\n end",
"def has_body?\n @body\n end",
"def body\n json? ? handle_json : handle_raw\n end",
"def body=(value)\n body_lazy(value)\n end",
"def set_body\n self.body = \"\" unless self.body\n end",
"def body_url\n @attributes[:body_url]\n end",
"def body_preview\n return @body_preview\n end",
"def body\n return \"\"\n end",
"def body\n return nil unless file?\n has_yaml_props? ?\n yaml_split[1] :\n data[0]\n end",
"def body?\n\t\t\t\t\t@body and !@body.empty?\n\t\t\t\tend",
"def body\n source\n end",
"def body\n connection.get(@url.path).body\n end",
"def body\n response.body.to_s\n end",
"def getBody\n body = \"\"\n @body.each { |part| body << part }\n body\n ensure\n @body.close if @body.respond_to?(:close)\n end",
"def html_body\n @html_body ||= reader_doc.content.strip\n end",
"def body\n @document.to_s\n end",
"def body\n @body ||= self.content.split(@@title_separator,2).last unless self.content.nil?\n end",
"def body\n @parsed_page.body\n end",
"def body_content\n end",
"def raw_body\n unless @raw_body\n @in.rewind\n @raw_body = @in.read(content_length)\n end\n\n @raw_body\n end",
"def body\n project.client.agent.get(self.url).at('div.description/p').inner_text\n end",
"def body_content\n raise NotImplementedError\n end"
] | [
"0.77817756",
"0.7737702",
"0.77064764",
"0.75504357",
"0.75313973",
"0.7431701",
"0.74137205",
"0.7396692",
"0.7342609",
"0.7332517",
"0.72996557",
"0.72766846",
"0.7207902",
"0.71907204",
"0.7170452",
"0.71642023",
"0.71642023",
"0.71642023",
"0.71642023",
"0.7137812",
"0.70710754",
"0.7038789",
"0.6999812",
"0.6975098",
"0.695094",
"0.6949357",
"0.6881075",
"0.6864065",
"0.68392044",
"0.68208647",
"0.6781583",
"0.6751549",
"0.67339784",
"0.67296934",
"0.67245066",
"0.6690405",
"0.66524637",
"0.6625499",
"0.65634304",
"0.654895",
"0.6527221",
"0.6507394",
"0.6507394",
"0.6483157",
"0.6481246",
"0.6478172",
"0.64480984",
"0.64431036",
"0.639709",
"0.63682675",
"0.6364263",
"0.63420093",
"0.6334426",
"0.6315038",
"0.62851554",
"0.62629217",
"0.6260399",
"0.62511003",
"0.62411743",
"0.6234938",
"0.62252975",
"0.62252975",
"0.6211465",
"0.6211409",
"0.61867064",
"0.6175829",
"0.6172905",
"0.6164499",
"0.6162182",
"0.6162182",
"0.6156601",
"0.6140443",
"0.6140443",
"0.6140153",
"0.6133555",
"0.61332345",
"0.6124593",
"0.61222994",
"0.6102041",
"0.60841566",
"0.6082408",
"0.60818034",
"0.6078464",
"0.6067346",
"0.606399",
"0.6058618",
"0.6046326",
"0.6044026",
"0.60299236",
"0.60292065",
"0.60281515",
"0.602298",
"0.6021242",
"0.5998608",
"0.5994853",
"0.5988012",
"0.5985975"
] | 0.7334725 | 12 |
Sets the body property value. The contents of the post. This is a default property. This property can be null. | def body=(value)
@body = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def body=(value)\n @body = value\n end",
"def body=(value)\n @body = value\n end",
"def set_body\n self.body = \"\" unless body\n body\n end",
"def set_body\n self.body = \"\" unless body\n body\n end",
"def set_body\n self.body = \"\" unless self.body\n end",
"def body=(new_body)\n attributes[:body] = new_body\n end",
"def set_body\n self.body = message_body\n end",
"def body(value = nil)\n if value\n @body = value\n else\n @body\n end\n end",
"def body(value = nil)\n if value\n self.body = value\n# add_encoding_to_body\n else\n process_body_raw if @body_raw\n @body\n end\n end",
"def body=(body)\n raise NotImplementedError, 'Subclasses must implement a body= method'\n end",
"def body=(newval)\n if self[:body] != newval\n # changed\n self[:body] = newval\n end\n self[:body]\n end",
"def set_body\n @body = Body.find(params[:id])\n end",
"def body(value = nil)\n\t\t\tvalue ? @body = value : @body ||= ''\n\t\tend",
"def body=(body)\n @body = DelayedBody.new{body}\n end",
"def body=(body); end",
"def set_body(options)\n Kickbox::HttpClient::RequestHandler.set_body(options)\n end",
"def body=(value)\n body_lazy(value)\n end",
"def body=(body)\n body = Array(body) unless body.respond_to?(:each)\n @_body = body\n end",
"def body=(body)\n body = Array(body) unless body.respond_to?(:each)\n @_body = body\n end",
"def set_body(body, content_type)\n @body = LoggingUtil.obfuscate_body(body)\n @content_type = content_type\n end",
"def body body=nil\n @response.body = body if body\n @response.body\n end",
"def set_body from_user\n self.body = message_body from_user\n end",
"def body(body)\n append(Body.new(body))\n end",
"def news_body=(text)\n self.body = text\n end",
"def update!(**args)\n @body = args[:body] if args.key?(:body)\n end",
"def update!(**args)\n @body = args[:body] if args.key?(:body)\n end",
"def set_request_body(obj)\n @body = obj\n end",
"def body(new = nil)\n if new.nil?\n @body\n else\n with(body: body.merge(new))\n end\n end",
"def post_with_body(body, user = nil)\n args = post_args.merge(raw: body)\n args[:user] = user if user.present?\n Fabricate.build(:post, args)\n end",
"def set_PostContent(value)\n set_input(\"PostContent\", value)\n end",
"def set_PostContent(value)\n set_input(\"PostContent\", value)\n end",
"def body_preview=(value)\n @body_preview = value\n end",
"def body_set(body)\n return dup_without_response.body_set(body) if response\n\n @body = body ? body.to_s : nil\n establish_content_length\n end",
"def post(body)\n Post.create!(sender_id: actor.id, body: body)\n end",
"def add_content\n if !body\n self.update_attributes(body: default_body_text)\n end\n end",
"def post_params\n params.permit(:body)\n end",
"def body=(new_body)\n # Set a request body\n if new_body.is_a?(IO)\n @body = file = new_body\n # Make sure the file is openned in binmode\n file.binmode if file.respond_to?(:binmode)\n # Fix 'content-length': it must not be bigger than a piece of a File left to be read or a String body size.\n # Otherwise the connection may behave like crazy causing 4xx or 5xx responses\n file_size = file.respond_to?(:lstat) ? file.lstat.size : file.size\n bytes_to_read = [ file_size - file.pos, self['content-length'].first ].compact.map{|v| v.to_i }.sort.first # remove nils then make values Integers\n if self['content-length'].first._blank? || self['content-length'].first.to_i > bytes_to_read\n self['content-length'] = bytes_to_read\n end\n else\n @body = new_body.to_s\n self['content-length'] = @body.bytesize if self['content-length'].first.to_i > @body.bytesize\n end\n end",
"def body\n @body || \"\"\n end",
"def set_body(match)\n if !match[:body].nil?\n @content_raw = match[:body].match(/\\{\\n((.|\\s)*)\\}/)[1]\n end\n end",
"def body=(i)\n if i.kind_of? ::String\n typecast(i)\n elsif i.kind_of? StructFu\n self[:body] = i\n elsif i.nil?\n self[:body] = StructFu::String.new.read(\"\")\n else\n raise ArgumentError, \"Can't cram a #{i.class} into a StructFu :body\"\n end\n end",
"def unique_body=(value)\n @unique_body = value\n end",
"def set_bodydatum\n @bodydatum = Bodydatum.find(params[:id])\n end",
"def set_body(body, content_type)\n if is_binary(content_type)\n @body = \"<binary content>\"\n else\n @body = @body_obfuscator.obfuscate_body(body)\n end\n @content_type = content_type\n end",
"def set_post\n @post = Post.new post_params\n end",
"def default_body_type=(body_type)\n @body_type = body_type\n end",
"def body(*args)\n return @body if args.empty?\n @body = args.first\n end",
"def body=(text)\n current_div.textarea(:id=>\"comp-body\").set text\n end",
"def body(request_body)\n @params[:body] = request_body\n end",
"def setCelestialBody(body)\n\t\tsetRawString('c:' + body.id)\n\tend",
"def post_params\n params.require(:post).permit(:body)\n end",
"def post_params\n params.require(:post).permit(:body)\n end",
"def force_post\n @type = \"post\"\n @post = \"\"\n end",
"def body\n read_attribute(:body) || ''\n end",
"def body\n self[:body]\n end",
"def body=(value)\n value = [] if !value || value == ''\n super(value.respond_to?(:each) ? value : [value.to_s])\n end",
"def body\n @body ||= process_text(@raw_body)\n end",
"def post_params\n params.require(:post).permit(:body)\n end",
"def body_params\n params.require(:body).permit(:name, :default_votes)\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def initialize(body)\n @body = body\n end",
"def customized_message_body=(value)\n @customized_message_body = value\n end",
"def post_params\n params.require(:post).permit(:user_id, :body)\n end",
"def new_body(text)\n @issue_body = text\n update_issue({ body: @issue_body })\n end",
"def default_body_obj(*args)\n Body.new(*args)\n end",
"def body\n @body_io.read.tap { @body_io.rewind }\n end",
"def body=(text)\n frm.frame(:id, \"body___Frame\").td(:id, \"xEditingArea\").frame(:index=>0).send_keys(text)\n end",
"def body\n if defined? @body\n @body\n else\n Null\n end\n end",
"def set_body_site\n @body_site = BodySite.find(params[:id])\n end",
"def set_body_of_water\n @body_of_water = BodyOfWater.find(params[:id])\n end",
"def set_request_body!(request); end",
"def body\n data[:body]\n end",
"def set_text(text)\n @body = TextElement.new(text)\n end",
"def update!(**args)\n @http_body = args[:http_body] if args.key?(:http_body)\n end",
"def body(value = (return @body unless defined?(yield); nil), &block)\n if block\n @body = DelayedBody.new(&block)\n else\n self.body = value\n end\n end",
"def body\n @attributes[:body]\n end",
"def copy_content_to_body\n self.body = self.content.to_plain_text\n end",
"def getBody\n @body\n end",
"def body\n process_message_body if !@body\n @body\n end",
"def set_body_work\n @body_work = BodyWork.find(params[:id])\n end",
"def body\n self.read_body\n self.get_body\n end",
"def body\n @body ||= @request.body.read\n end",
"def update\n title = params[:title]\n body = params[:body]\n\n @post.update!(title: title, body: body)\n\n if @post.save!\n json_response(@post)\n else\n json_response(@post.errors)\n end\n end",
"def body_lazy(value)\n process_body_raw if @body_raw && value\n case\n when value == nil || value.length<=0\n @body = Mail::Body.new('')\n @body_raw = nil\n add_encoding_to_body\n when @body && @body.multipart?\n @body << Mail::Part.new(value)\n add_encoding_to_body\n else\n @body_raw = value\n# process_body_raw\n end\n end",
"def body_handler(body_handler)\n @body_handler = body_handler\n self\n end",
"def body\n self['body']\n end",
"def set_bodypart\n @bodypart = Bodypart.find(params[:id])\n end",
"def body=(content)\n note_body = ENMLElement.new(colour)\n note_body.body = content\n\n @note.content = note_body.to_s\n end",
"def body\n @body ||= Samlr::Tools::RequestBuilder.build(options)\n end",
"def set_body_contents\n set_node(@template, 'cer|ApplicationRequest', @application_request.to_base64)\n set_node(@template, 'cer|SenderId', @customer_id)\n set_node(@template, 'cer|RequestId', request_id)\n set_node(@template, 'cer|Timestamp', iso_time)\n\n @template\n end",
"def body\n nil\n end",
"def body\n ret = read_attribute(:body)\n if ret.nil?\n return ret\n end\n ret = ret.strip\n ret = ret.gsub(/(?:\\n\\s*){2,}/, \"\\n\\n\") # remove excess linebreaks that unnecessarily space it out\n ret\n end",
"def escape_body\n self.body = ERB::Util.html_escape(body)\n end",
"def posts=(value)\n @posts = value\n end",
"def posts=(value)\n @posts = value\n end",
"def body_unset\n body_set nil\n end"
] | [
"0.83940953",
"0.8258356",
"0.78562945",
"0.78562945",
"0.7826137",
"0.75900346",
"0.7557674",
"0.74825907",
"0.74683005",
"0.7433115",
"0.73629755",
"0.7320466",
"0.73097104",
"0.7208749",
"0.71763206",
"0.70647293",
"0.70517606",
"0.70163375",
"0.70163375",
"0.6828269",
"0.6710765",
"0.667883",
"0.6666899",
"0.66526526",
"0.6594659",
"0.6594659",
"0.65584385",
"0.6506666",
"0.64911467",
"0.6488644",
"0.6488644",
"0.6484329",
"0.64745516",
"0.6454991",
"0.6440466",
"0.6412754",
"0.6338253",
"0.631119",
"0.62583613",
"0.62337285",
"0.62089926",
"0.6183597",
"0.6178759",
"0.61785483",
"0.61714303",
"0.61597455",
"0.6156449",
"0.6141945",
"0.609711",
"0.60754824",
"0.60754824",
"0.6054728",
"0.6038802",
"0.6013585",
"0.59841704",
"0.5980673",
"0.5929197",
"0.5871603",
"0.5860125",
"0.5860125",
"0.5860125",
"0.5860125",
"0.5851686",
"0.58472514",
"0.5809283",
"0.5795791",
"0.5781892",
"0.57773966",
"0.576316",
"0.5759399",
"0.5745769",
"0.574011",
"0.5736771",
"0.5705568",
"0.56914335",
"0.5675644",
"0.5668156",
"0.56629646",
"0.565614",
"0.5651399",
"0.5645116",
"0.5639248",
"0.56297743",
"0.5607509",
"0.55886906",
"0.55812657",
"0.55783564",
"0.556078",
"0.5556712",
"0.5556639",
"0.55377406",
"0.55272406",
"0.55170006",
"0.5510447",
"0.5497295",
"0.5497295",
"0.5471178"
] | 0.834245 | 4 |
Instantiates a new post and sets the default values. | def initialize()
super
@odata_type = "#microsoft.graph.post"
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_post\n @post = Post.new post_params\n end",
"def new\n @post = Post.new()\n end",
"def new\n @post = Post.new()\n end",
"def new\n @post = Post.new()\n end",
"def new\n @post = Post.new()\n end",
"def new\n @post = Post.new #returns an empty post\n end",
"def new\n @post = Post.new #creates new instance of the/a post\n #since it's only creating an instance in memory have to create/post method create blw\n end",
"def new\t\n\t\t@post = Post.new\n\tend",
"def new \n\t\t@post = Post.new\n\tend",
"def create\n @post = Post.create!(post_params)\n end",
"def new \n @post = Post.new\n end",
"def new\n \t@post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n\t\t@post = Post.new\n\tend",
"def new\n # responsbile for creating a new instance of a post\n @post = Post.new\n end",
"def new\r\n @post = Post.new\r\n end",
"def new\n @post = Post.new\n\n assert(@post.kind_of?(Post))\n\n return @post\n end",
"def new\n\t @post = Post.new\n\tend",
"def newPost(title, contents, categories = nil, keywords = nil, created = nil, publish = 1, user = nil, password = nil)\n article = MetaWeblogStructs::Article.new\n article.title = title\n article.description = contents\n article.categories = categories if categories\n article.mt_keywords = keywords if keywords\n article.dateCreated = created if created\n\n @client.newPostStruct(@blog.blogid, user, password, article, publish)\n end",
"def new\n\t\t@post = Post.new \n\tend",
"def create\n @post = Post.new(post_params)\n\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new\n @post = Post.new\n end",
"def new \n @post = Post.new\n end",
"def new\n\t@post = Post.new\nend",
"def new\n @post = Post.new\n\n end",
"def new\n @post = Post.new\n\n end",
"def create_post(user_id, post_hash={})\n @post = Post.new(:user_id => user_id, :location_id => post_hash[:location_id], :text => post_hash[:text])\n @post.save\n end",
"def newPost(_, _, _, metaweblog_struct, _)\n post = Post.create(@db.content_path, metaweblog_struct)\n @db.refresh_post_paths\n\n run_user_cmd\n post['postid']\n end",
"def new\n @post = Post.new(name: params[:name],\n email: params[:email],\n year: params[:year],\n major: params[:major],\n Q1: params[:Q1],\n Q2: params[:Q2],\n Q3: params[:Q3],\n Q4: params[:Q4],\n Q5: params[:Q5],\n Q6: params[:Q6])\n end",
"def new\n\t\t@post = @document.build_post\n\tend",
"def set_defaults\n published_on = Time.now\n self.attributes = {:post_author => 1, :post_date => published_on, :post_date_gmt => published_on.getutc, \n :post_status => 'publish', :comment_status => 'closed',\n :ping_status => 'closed', :post_parent => 0, :menu_order => 0, :post_type => 'gd_place',\n :comment_count => 0, :post_content => '', \n :post_excerpt => '', :to_ping => '', :pinged => '', :post_content_filtered => ''}\n end",
"def new\n @post = Post.new\n @post.user ||= current_user\n end",
"def new\n @cat = Post.new\n end",
"def new_post *args\n groups(:studio1).posts.new *args\n end",
"def new\n @thread = Discussion.new\n @post = Post.new\n end",
"def create_post(message, author_name, author_email, author_url = nil, ip_address = nil, created_at = nil)\n new_post_hash = API.create_post(forum_key, id, message, author_name, author_email, author_url, ip_address, created_at)\n new_post = Post.new(new_post_hash.merge(default_hash))\n @posts << new_post if @posts\n new_post\n end",
"def set_post\n @post = Post.from_param(params[:post_id]) if params[:post_id].present?\n end",
"def initialize(post, options = {})\n @options = options\n empty!\n parse(post)\n end",
"def create\n @post = Post.create!(params[:post]) \n end",
"def set_post\n # just to hide ID's\n # and use uuid\n # for testing on heroku to avoid mass creation\n @post = Post.where(id: params[:id]).first || Post.where(uuid: params[:id]).first\n end",
"def new\n # build a 'temporary' post which is written to DB later (create-method)\n @project = Project.new\n end",
"def fresh\n\t@post = Post.new\nend",
"def new #http://localhost:3000/posts/new\n\t@post = Post.new #creates a new empty post,if a form is created for it, it will be empty as well \n end",
"def initialize(post)\n raise NoDataError if post.to_s.empty?\n\n @params = {}\n @raw = \"\"\n\n parse(post)\n end",
"def create\n \n unless current_user.can_post\n redirect_to welcome_page_path\n end\n \n @post = Post.new\n @post.user = current_user\n @post.privly_application = params[:post][:privly_application]\n\n # Posts default to Private\n if params[:post][:public]\n @post.public = params[:post][:public]\n else\n @post.public = false\n end\n\n set_burn_date\n \n # The random token will be required for users other than the owner\n # to access the content. The model will generate a token before saving\n # if it is not assigned here.\n @post.random_token = params[:post][:random_token]\n \n @post.update_attributes(params[:post])\n \n respond_to do |format|\n if @post.save\n response.headers[\"X-Privly-Url\"] = @post.privly_URL\n format.any { render :json => get_json, \n :status => :created, :location => @post }\n else\n format.any { render :json => @post.errors, \n :status => :unprocessable_entity }\n end\n end\n end",
"def new\n @robots=\"noindex,nofollow\"\n authorize! :create, Roxiware::Blog::Post\n @post = Roxiware::Blog::Post.new({:person_id=>current_user.person.id,\n :blog_class=>(params[:blog_class] || \"blog\"),\n :post_date=>DateTime.now.utc,\n :post_content=>\"\",\n :post_title=>\"\",\n :post_status=>\"publish\"}, :as=>\"\")\n\n # We need to pass the post category in separately as on new post creation, the\n # category joins are not yet created for the post.\n @post_category = Roxiware::Param::Param.application_param_val('blog', 'default_category')\n respond_to do |format|\n format.html { render :partial =>\"roxiware/blog/post/editform\" }\n format.json { render :json => @post.ajax_attrs(@role) }\n end\n end",
"def initialize(title) #initialize the post class with a title\n @title = title\n end",
"def create\n post = Post.new\n post.title = params[:title]\n post.description = params[:description]\n post.save\n end",
"def new\n @topic = Topic.find(params[:topic_id])\n @post = Post.new\n#create an instance variable, @post, then assign it an empty post returned by Post.new\n end",
"def create\n\n\t\t@post = Post.new(post_params)\n\t\tif @post.tag == \"\"\n\t\t\tredirect_to :back\n\t\telsif @post.body == \"\"\n\t\t\tredirect_to :back\n\t\telse\n\t\t\t@post.save\n\t\t\tredirect_to \"#\"\n\t\t\tflash[:info] = \"Post submited!\"\n\t\tend\n\tend",
"def new\n @post = Post.new\nend",
"def new\n\t\t#Generate uuid for majorpost\n\t\tgenerate_uuid!\n\t\t@majorpost = Majorpost.new(\n\t\t\t\tuser_id: @user.id,\n\t\t\t\tuuid: @uuid\n\t\t\t)\n\t\t@upload_url = '/content/artworks/medium_editor_upload_artwork/'+@majorpost.uuid\n\tend",
"def create_post\n post_title = parse_html.css('title')[0].text\n post_url = @url\n points = parse_html.search('.subtext > span:first-child').map { |span| span.inner_text}\n post_points = points[0]\n all_ids = parse_html.search('tr.athing td center a').map { |link| link['id']}\n post_id = all_ids[0].gsub(/[[a-z]_\" \"\"]/,\"\") # taken from the first upvote button\n @post = Post.new(post_title, post_url, post_points, post_id, parse_comments)\n end",
"def new\n @post = Post.new(content: params[:content])\n end",
"def initialize #initializes post belonging to author \r\n @post = author\r\n end",
"def initialize\n\t\t@created_at = Time.now\n\t\tputs \"Name this blog post:\"\n\t\t@title = gets.chomp\n\t\t\n\t\tputs \"Add content for this blog post:\"\n\t\t@content = gets.chomp\n\tend",
"def createPost(post_params)\n post = Post.new(post_params)\n post.status = 1\n isSavePost = PostRepository.createPost(post)\n end"
] | [
"0.77533215",
"0.7449862",
"0.7398832",
"0.7398832",
"0.7398832",
"0.7389544",
"0.73541325",
"0.72788274",
"0.7187685",
"0.715627",
"0.7130591",
"0.70875096",
"0.70814353",
"0.70811623",
"0.70811623",
"0.70811623",
"0.70811623",
"0.70811623",
"0.70811623",
"0.7052429",
"0.70472383",
"0.701097",
"0.70073783",
"0.6975101",
"0.69750637",
"0.6940209",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876962",
"0.6876644",
"0.68646944",
"0.68385535",
"0.67388344",
"0.67388344",
"0.67350554",
"0.6733743",
"0.6729051",
"0.66734254",
"0.6663011",
"0.6634745",
"0.6613083",
"0.6568284",
"0.65569353",
"0.65111613",
"0.6484161",
"0.64577615",
"0.64513636",
"0.6451128",
"0.64327395",
"0.64141554",
"0.64132047",
"0.6401421",
"0.64009047",
"0.6400795",
"0.63714254",
"0.6363123",
"0.6353764",
"0.6353143",
"0.63465303",
"0.6337077",
"0.6323934",
"0.63177043",
"0.6302032",
"0.6276738",
"0.62494034"
] | 0.0 | -1 |
Gets the conversationId property value. Unique ID of the conversation. Readonly. | def conversation_id
return @conversation_id
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_thread_id\n return @conversation_thread_id\n end",
"def chat_id\n return @chat_id\n end",
"def chat_id\n return @chat_id\n end",
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def id\n messaging['id']\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def message_id\n return @message_id\n end",
"def message_id\n @message_id\n end",
"def message_id\n @message_id ||= message.message_id\n end",
"def message_id\n data[:message_id]\n end",
"def id\n @message[:id]\n end",
"def message_id\n self['message-id']\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def conversation_person\n\t\treturn nil unless (mode? :conversation)\n\t\treturn @talking_to\n\tend",
"def get_conversation(id)\n get(\"conversations/#{id}\")\n end",
"def message_id\n headers['Message-Id'] || msg['_id'] || self['_id']\n end",
"def recipient_id\n @row[\"配送先ID\"].to_s\n end",
"def conversation_member_user\n return @conversation_member_user\n end",
"def message_id\n @mail.message_id\n end",
"def conversation_index\n return @conversation_index\n end",
"def message_id=(value)\n @message_id = value\n end",
"def user_id\n @message[:user][:id]\n end",
"def participant_id\n return @participant_id\n end",
"def participant_id\n return @participant_id\n end",
"def has_conversation?(conversation_id)\r\n self.conversations.has_key?(conversation_id.to_s)\r\n end",
"def network_message_id\n return @network_message_id\n end",
"def get(id)\n Conversation.from_id(@client, id)\n end",
"def message_id; @message_impl.getMessageId; end",
"def id\r\n return @actor_id\r\n end",
"def user_id; @message_impl.getUserId; end",
"def id\n reply.documents[0][ID]\n end",
"def get_id()\n return @id\n end",
"def reply_chain_message_id\n return @reply_chain_message_id\n end",
"def particular_conversation\n @this_conversation = Conversation.find(params[:id])\n end",
"def message_id\n\t\tmessage_id = self.headers[\"Message-ID\"]\n\t\tmessage_id.nil? || message_id.empty? ? message_id : nil\n\tend",
"def get_id(channel_name)\n return get_value_of(channel_name, :id)\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def id\n self._id.to_s if self._id\n end",
"def my_participant_id\n return @my_participant_id\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def get_id\n return @m_id\n end",
"def get_id\n return @m_id\n end",
"def get_id\n return @m_id\n end",
"def get_id\n return @m_id\n end",
"def get_id\n return @m_id\n end",
"def set_conversation_message\n @conversation_message = ConversationMessage.find(params[:id])\n end",
"def set_conversation\n\t\t@conversation = Conversation.find(params[:id])\n\tend",
"def conversation\n Conversation\n .where(\"creator_id = ? or member_id = ?\", user_id, user_id)\n .order(\"latest_message_id DESC\").first\n end",
"def original_message_id \n @private_messages.first.original_message_id # All threaded messages share the same original ID, so we'll just call the first record\n end",
"def call_id\n return @call_id\n end",
"def get_conversation(id, options = nil)\r\n @client.raw('get', \"/content/conversations/#{id}\", options, nil, @contact_v1_url)\r\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end",
"def getId()\n\t\t\treturn @_id\n\t\tend",
"def creator_conversation_ids\n self.creator ?\n self.creator.conversation_ids + self.creator.created_conversation_ids : []\n end",
"def _id\n @attributes && @attributes[:_id]\n end",
"def get_message_id\n return message_id if Rails.env.production?\n '2b4a4e966e0300b90a00c28b714f1c38@masamigos.cl'\n end",
"def id\n return self.topic['id'].to_i\n end",
"def correlation_id; @message_impl.getCorrelationId; end",
"def transaction_id\n return nil if grpc.nil?\n grpc.id\n end",
"def conversation\n message.conversation if message.is_a? Mailboxer::Message\n end",
"def conversation\n message.conversation if message.is_a? Mailboxer::Message\n end",
"def get_id()\n return @id\n end",
"def set_message_id\n self.message_id ||= @cur_message.id\n end",
"def activity_identifier\n return @activity_identifier\n end",
"def user_id\n case @params.event\n when 'conversation_started', 'subscribed'\n @params.user.id\n when 'unsubscribed', 'delivered', 'seen', 'failed'\n @params.user_id\n when 'message'\n @params.sender.id\n else\n @params.dig(:user, :id)\n end\n end",
"def notification_id\n @id\n end",
"def conversation_index=(value)\n @conversation_index = value\n end",
"def to_i\n @id\n end",
"def to_i\n @id\n end",
"def to_i\n @id\n end",
"def to_i\n @id\n end",
"def inbox_id\n @attributes[:inbox_id]\n end",
"def number\n @number || (conversation.try(:number))\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def id\n read_attribute(self.class.primary_key)\n end",
"def id\n read_attribute(self.class.primary_key)\n end",
"def network_message_id=(value)\n @network_message_id = value\n end",
"def correlation_id\n return @correlation_id\n end"
] | [
"0.804745",
"0.804745",
"0.7515663",
"0.6822819",
"0.6822819",
"0.6656735",
"0.66101885",
"0.6587035",
"0.6587035",
"0.6559587",
"0.6471356",
"0.64310956",
"0.6400675",
"0.62336504",
"0.6226752",
"0.6222848",
"0.6222848",
"0.6192165",
"0.61167383",
"0.60516506",
"0.6032556",
"0.5966325",
"0.5959612",
"0.59457326",
"0.59358484",
"0.58779216",
"0.5863278",
"0.5863278",
"0.5859993",
"0.58452475",
"0.5833849",
"0.5830439",
"0.5820384",
"0.5803974",
"0.5797731",
"0.57660085",
"0.56995994",
"0.5699401",
"0.56987995",
"0.56938136",
"0.5675827",
"0.5671851",
"0.5664972",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56295544",
"0.56203187",
"0.56203187",
"0.56203187",
"0.56203187",
"0.56203187",
"0.56170905",
"0.56131935",
"0.5585668",
"0.5571749",
"0.55123377",
"0.55020726",
"0.5489201",
"0.5489201",
"0.5489201",
"0.5489201",
"0.5489201",
"0.54866713",
"0.5477624",
"0.5469475",
"0.5464132",
"0.5441772",
"0.54405224",
"0.54344904",
"0.5406242",
"0.53876233",
"0.53876233",
"0.5377102",
"0.5365197",
"0.53467983",
"0.5313342",
"0.531244",
"0.5309284",
"0.5308873",
"0.5308873",
"0.5308873",
"0.5308873",
"0.5307724",
"0.5303077",
"0.52551067",
"0.52347785",
"0.52347785",
"0.5228351",
"0.52195525"
] | 0.86132896 | 1 |
Sets the conversationId property value. Unique ID of the conversation. Readonly. | def conversation_id=(value)
@conversation_id = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def set_conversation\n\t\t@conversation = Conversation.find(params[:id])\n\tend",
"def set_conversation_message\n @conversation_message = ConversationMessage.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation_user\n @conversation_user = ConversationUser.find(params[:id])\n end",
"def conversation_id\n return @conversation_id\n end",
"def conversation_id\n return @conversation_id\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def set_user_conversation\n @user_conversation = UserConversation.find(params[:id])\n end",
"def update_conversation(id, data)\r\n @client.raw('put', \"/content/conversations/#{id}\", nil, data_transform(data), @contact_v1_url)\r\n end",
"def set_message_id\n self.message_id ||= @cur_message.id\n end",
"def update_conversation(id, data)\n put(\"conversations/#{id}\", { body: data })\n end",
"def message_id=(value)\n @message_id = value\n end",
"def conversation_index=(value)\n @conversation_index = value\n end",
"def delete_conversation(id)\n @client.raw('delete', \"/content/conversations/#{id}\")\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def delete_conversation(id)\n delete(\"conversations/#{id}\")\n end",
"def set_conversation\n @issue = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.where(featured: true).find(params[:id])\n end",
"def set_task\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation_data(opts = {})\n uri = \"/v3/botstate/#{opts['channel_id']}/conversations/#{opts['conversation_id']}\"\n api_post(uri, opts['bot_data'])\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def has_conversation?(conversation_id)\r\n self.conversations.has_key?(conversation_id.to_s)\r\n end",
"def conversation_thread_id\n return @conversation_thread_id\n end",
"def set_chat_message\n @chat_message = ChatMessage.find(params[:id])\n end",
"def my_participant_id=(value)\n @my_participant_id = value\n end",
"def set_ForChannelId(value)\n set_input(\"ForChannelId\", value)\n end",
"def set_ForChannelId(value)\n set_input(\"ForChannelId\", value)\n end",
"def set_ForChannelId(value)\n set_input(\"ForChannelId\", value)\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def correlation_id=(correlation_id); @message_impl.setCorrelationId correlation_id; end",
"def conversation_member_user=(value)\n @conversation_member_user = value\n end",
"def call_id=(value)\n @call_id = value\n end",
"def set_chat\n @chat = Chat.find(params[:id])\n end",
"def set_coffee_chat\n @coffee_chat = CoffeeChat.find(params[:id])\n end",
"def user_id=(user_id); @message_impl.setUserId user_id; end",
"def participant_id=(value)\n @participant_id = value\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def conversation_params\n params.require(:conversation).permit(:recipient_id, :sender_id)\n end",
"def particular_conversation\n @this_conversation = Conversation.find(params[:id])\n end",
"def set_direct_chat\n @direct_chat = DirectChat.find(params[:id]) rescue nil\n return res_with_error(\"Chat not found\", :not_found) unless @direct_chat\n end",
"def update_conversation_status(id, data)\r\n @client.raw('put', \"/content/conversations/#{id}/status\", nil, data_transform(data), @contact_v1_url)\r\n end",
"def set_support_conversation\n @support_conversation = SupportConversation.find(params[:id])\n end",
"def mergeId=(value)\n\t\t\t@mergeId = value\n\t\tend",
"def chat_id\n return @chat_id\n end",
"def chat_id\n return @chat_id\n end",
"def message_id=(message_id); @message_impl.setMessageId message_id.to_s; end",
"def assign_conversation\n self.conversation = parent.nil? ? Conversation.create(:sender => sender, :recipient => recipient) : parent.conversation\n #self.conversation = parent.nil? ? Conversation.create :\n # parent.conversation\n end",
"def patch_conversations_chat(conversation_id, body, opts = {})\n data, _status_code, _headers = patch_conversations_chat_with_http_info(conversation_id, body, opts)\n return data\n end",
"def get_conversation(id)\n get(\"conversations/#{id}\")\n end",
"def load_conversation\n @conversation = ConversationService.new(params[:id]).call if params[:id]\n end",
"def set_CommentId(value)\n set_input(\"CommentId\", value)\n end",
"def set_id(id)\n unless id.nil?\n @id = id\n end\n @id\n end",
"def _id=(id)\n @_id = id.to_s # in case of BSON::ObjectId\n end",
"def edit\n @conversation = Conversation.find(params[:id])\n end",
"def set_recipient\n @recipient = Recipient.find(params[:id])\n end",
"def set_recipient\n @recipient = Recipient.find(params[:id])\n end",
"def send(conversation_id, activity)\n uri = \"/v3/conversations/#{conversation_id}/activities\"\n api_post(uri, activity.as_json)\n end",
"def set_CampaignID(value)\n set_input(\"CampaignID\", value)\n end",
"def sender_user_id=(value)\n @sender_user_id = value\n end",
"def conversation_update\n if params[:message][:conversation_line_id]\n #if message_id set then edit the message\n message = Message.find(params[:message][:conversation_line_id])\n\n message.update(\n message_params.merge(updated_at: Time.at(params[:message][:timestamp].to_i))\n )\n\n # Update expiry date of messages on conversations lines\n message.conversation_line_messages.update(\n message_params.merge(updated_at: Time.at(params[:message][:timestamp].to_i))\n )\n\n message.current_user = current_user\n render json: {\n success: true\n }\n else\n render json: {\n success: false\n }\n end\n end",
"def network_message_id=(value)\n @network_message_id = value\n end",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend",
"def setId(id)\r\n\t\t\t\t\t@id = id\r\n\t\t\t\tend"
] | [
"0.6950916",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.69257045",
"0.687652",
"0.68558764",
"0.6766833",
"0.6637297",
"0.6637297",
"0.6637297",
"0.6637297",
"0.6637297",
"0.6608538",
"0.6599302",
"0.6599302",
"0.6576756",
"0.6576756",
"0.65400136",
"0.59823614",
"0.5941447",
"0.58882",
"0.5888172",
"0.57368404",
"0.56643516",
"0.5648963",
"0.5614987",
"0.55696785",
"0.55672234",
"0.5564773",
"0.55511606",
"0.55420774",
"0.55420774",
"0.5534886",
"0.5383924",
"0.5380386",
"0.5356126",
"0.5325317",
"0.5325317",
"0.5325317",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.53092927",
"0.5261214",
"0.52484065",
"0.522179",
"0.52205",
"0.5215822",
"0.51818687",
"0.5180682",
"0.5180682",
"0.5171648",
"0.5153544",
"0.51376176",
"0.51307744",
"0.51233506",
"0.51158726",
"0.5107851",
"0.5107851",
"0.51037157",
"0.5102909",
"0.50955176",
"0.50936604",
"0.50880754",
"0.50808513",
"0.50624126",
"0.50516146",
"0.50283706",
"0.5026766",
"0.5026766",
"0.50235415",
"0.502162",
"0.5011941",
"0.5009698",
"0.49954963",
"0.49945274",
"0.49945274",
"0.49945274",
"0.49945274",
"0.49945274",
"0.49945274",
"0.49945274"
] | 0.8422973 | 1 |
Gets the conversationThreadId property value. Unique ID of the conversation thread. Readonly. | def conversation_thread_id
return @conversation_thread_id
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conversation_thread_id=(value)\n @conversation_thread_id = value\n end",
"def conversation_id\n return @conversation_id\n end",
"def conversation_id\n return @conversation_id\n end",
"def thread_id\n return @thread_id\n end",
"def thread_id\n check_connection\n @protocol.thread_id\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def chat_id\n return @chat_id\n end",
"def chat_id\n return @chat_id\n end",
"def thread_id=(value)\n @thread_id = value\n end",
"def thread_id\n\n\t\t::Pantheios::Core.thread_id\n\tend",
"def conversation_member_user\n return @conversation_member_user\n end",
"def thread_id()\n #This is a stub, used for indexing\n end",
"def getThreadId(currentComment)\n current_new_id = currentComment.new_id\n while current_new_id.nil?\n currentComment = Comment.find(currentComment.comment_id)\n current_new_id = currentComment.new_id\n end \n return current_new_id\n end",
"def conversation_person\n\t\treturn nil unless (mode? :conversation)\n\t\treturn @talking_to\n\tend",
"def reply_chain_message_id\n return @reply_chain_message_id\n end",
"def message_id\n return @message_id\n end",
"def activity_identifier\n return @activity_identifier\n end",
"def participant_id\n return @participant_id\n end",
"def participant_id\n return @participant_id\n end",
"def message_id\n @message_id\n end",
"def network_message_id\n return @network_message_id\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def conversation\n @conversation ||= mailbox.conversations.find(params[:id])\n end",
"def message_id\n @mail.message_id\n end",
"def my_participant_id\n return @my_participant_id\n end",
"def id\r\n return @actor_id\r\n end",
"def message_id\n @message_id ||= message.message_id\n end",
"def message_id\n self['message-id']\n end",
"def conversation_index\n return @conversation_index\n end",
"def mailbox_mail_threads_id_get(id, opts = {})\n mailbox_mail_threads_id_get_with_http_info(id, opts)\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def message_id\n data[:message_id]\n end",
"def id\n messaging['id']\n end",
"def recipient_id\n @row[\"配送先ID\"].to_s\n end",
"def conversation\n message.conversation if message.is_a? Mailboxer::Message\n end",
"def conversation\n message.conversation if message.is_a? Mailboxer::Message\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end",
"def user_id\n @message[:user][:id]\n end",
"def inbox_id\n @attributes[:inbox_id]\n end",
"def sender_user_id\n return @sender_user_id\n end",
"def message_id\n headers['Message-Id'] || msg['_id'] || self['_id']\n end",
"def user_id; @message_impl.getUserId; end",
"def original_message_id \n @private_messages.first.original_message_id # All threaded messages share the same original ID, so we'll just call the first record\n end",
"def getWorkflowId()\n @workflowId\n end",
"def chat_type\n return @chat_type\n end",
"def team_id\n return @team_id\n end",
"def conversation_member_roles\n return @conversation_member_roles\n end",
"def app_activity_id\n return @app_activity_id\n end",
"def message_id; @message_impl.getMessageId; end",
"def get_thread_details(username, thread_id)\n $LOG.i \"running \" + __method__.to_s\n @client.get '/mc/v1/threads/' + username + '/' + thread_id\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def correlation_id\n return @correlation_id\n end",
"def conversation\n Conversation\n .where(\"creator_id = ? or member_id = ?\", user_id, user_id)\n .order(\"latest_message_id DESC\").first\n end",
"def message_id\n\t\tmessage_id = self.headers[\"Message-ID\"]\n\t\tmessage_id.nil? || message_id.empty? ? message_id : nil\n\tend",
"def get_conversation(id, embed_threads: false)\n if embed_threads\n get(\"conversations/#{id}?embed=threads\")\n else\n get(\"conversations/#{id}\")\n end\n end",
"def get_reply_to\n @reply_to\n end",
"def reply_to\n return @reply_to\n end",
"def in_reply_to\n return @in_reply_to\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def thread_uuid thr = nil\n thr ||= Thread.current\n @@thread_uuid_mutex.synchronize do\n thr[:'ASIR::UUID.thread_uuid'] ||= counter_uuid\n end\n end",
"def thread_context(tid)\r\n Ragweed::Wrap32::open_thread(tid) do |h|\r\n Ragweed::Wrap32::get_thread_context(h)\r\n end\r\n end",
"def value\n val = @thread.value\n unset_thread_task\n val\n end",
"def correlation_id; @message_impl.getCorrelationId; end",
"def role_template_id\n return @role_template_id\n end",
"def call_chain_id\n return @call_chain_id\n end",
"def call_chain_id\n return @call_chain_id\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def actor_id_from_cmd\n @command_window.actors[@command_window.index]\n end",
"def creator_conversation_ids\n self.creator ?\n self.creator.conversation_ids + self.creator.created_conversation_ids : []\n end",
"def thread_address(thread)\n \"<thread-#{thread.public_token}@#{domain}>\"\n end",
"def call_id\n return @call_id\n end",
"def internet_message_id\n return @internet_message_id\n end",
"def internet_message_id\n return @internet_message_id\n end",
"def uid\n \"#{Socket.gethostname}-#{Process.pid}-#{thread_id}\"\n end",
"def guid\n activity_object.id\n end",
"def get_user_thread(user_id)\n user_threads_lock.synchronize do\n user_threads[user_id]\n end\n end",
"def workflow_id\n self.decision_context.workflow_context.decision_task.workflow_execution.workflow_id\n end",
"def message_id=(value)\n @message_id = value\n end",
"def get_bot_user_id\n 1.freeze\n end"
] | [
"0.7625181",
"0.74000466",
"0.74000466",
"0.72015804",
"0.6623548",
"0.63250613",
"0.63250613",
"0.6158092",
"0.6158092",
"0.61338323",
"0.60831684",
"0.5995234",
"0.59119195",
"0.5764345",
"0.5680798",
"0.565587",
"0.5647614",
"0.5541468",
"0.5510627",
"0.5510627",
"0.5431496",
"0.54312617",
"0.5391795",
"0.5391795",
"0.53007007",
"0.52947086",
"0.5292014",
"0.52833766",
"0.5271111",
"0.52562964",
"0.52369267",
"0.52149075",
"0.52149075",
"0.5207145",
"0.5173765",
"0.51695424",
"0.5143542",
"0.5143542",
"0.5123223",
"0.5082102",
"0.50638187",
"0.5050258",
"0.5019154",
"0.5012804",
"0.50057423",
"0.49595404",
"0.49065435",
"0.48704678",
"0.48663974",
"0.48634493",
"0.48515826",
"0.48354346",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48311523",
"0.48287287",
"0.4825159",
"0.48226532",
"0.48045787",
"0.4779442",
"0.4762524",
"0.4759658",
"0.47580847",
"0.47521225",
"0.47233346",
"0.47152787",
"0.4707567",
"0.470561",
"0.4700671",
"0.4700671",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46997333",
"0.46970198",
"0.46726674",
"0.46670124",
"0.46627122",
"0.46615785",
"0.46615785",
"0.464772",
"0.46405393",
"0.46385762",
"0.46252817",
"0.46094084",
"0.46076876"
] | 0.8546284 | 0 |
Sets the conversationThreadId property value. Unique ID of the conversation thread. Readonly. | def conversation_thread_id=(value)
@conversation_thread_id = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_id=(value)\n @conversation_id = value\n end",
"def conversation_thread_id\n return @conversation_thread_id\n end",
"def thread_id=(value)\n @thread_id = value\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadID(value)\n set_input(\"ThreadID\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_ThreadIdentifier(value)\n set_input(\"ThreadIdentifier\", value)\n end",
"def set_thread\n @thread = Threads.find(params[:id])\n end",
"def set_conversation_user\n @conversation_user = ConversationUser.find(params[:id])\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def chat_id=(value)\n @chat_id = value\n end",
"def conversation_id\n return @conversation_id\n end",
"def conversation_id\n return @conversation_id\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_user_conversation\n @user_conversation = UserConversation.find(params[:id])\n end",
"def set_conversation\n\t\t@conversation = Conversation.find(params[:id])\n\tend",
"def set_message_thread\n @message_thread = @current_shop.message_threads.find(params[:id])\n end",
"def set_conversation_message\n @conversation_message = ConversationMessage.find(params[:id])\n end",
"def set_Thread(value)\n set_input(\"Thread\", value)\n end",
"def set_Thread(value)\n set_input(\"Thread\", value)\n end",
"def set_user_thread\n @user_thread = @topic.user_threads.find(params[:id])\n end",
"def set_thread_for_replies\n self.thread = self.commentable.thread if self.reply_comment?\n end",
"def set_thread_object\n @thread_object = ThreadObject.find(params[:id])\n end",
"def conversation_member_user=(value)\n @conversation_member_user = value\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def set_conversation\n @conversation = Conversation.find(params[:id])\n end",
"def update_thread\n self.update_attribute(:thread, self.id) unless self.thread\n end",
"def set_thread_obj\n @thread_obj = ThreadObj.find(params[:id])\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def set_forum_thread\n @forum_thread = ForumThread.find(params[:id])\n end",
"def set_user_thread(user_id, user_thread)\n user_threads_lock.synchronize do\n user_threads[user_id] = user_thread\n end\n end",
"def set_forum_thread\n @forum_thread = Forum::Thread.find(params[:id])\n end",
"def set_app_thread\n @app_thread = @project.app_threads.find(params[:id])\n end",
"def thread_id\n return @thread_id\n end",
"def conversation_index=(value)\n @conversation_index = value\n end",
"def set_task\n @conversation = Conversation.find(params[:id])\n end",
"def set_forum_thread\n @forum_thread = ForumThread.find(params[:id])\n end",
"def set_project_thread\n @project_thread = ProjectThread.find(params[:id])\n end",
"def set_board_thread\n @board_thread = BoardThread.find(params[:id])\n end",
"def set_conversation\n @issue = Conversation.find(params[:id])\n end",
"def reply_to_thread(username, thread_id, params = {})\n $LOG.i \"running \" + __method__.to_s\n @client.post '/mc/v1/threads/' + username + '/' + thread_id, params\n end",
"def set_viewthread\n @viewthread = Viewthread.find(params[:id])\n end",
"def set_forum_thread\n @forum_thread = ForumThread.find(params[:forum_thread_id])\n end",
"def mark_thread(username, thread_id, params = {})\n $LOG.i \"running \" + __method__.to_s\n @client.put '/mc/v1/threads/' + username + '/' + thread_id, params\n end",
"def set_custom_thread\n @custom_thread = CustomThread.find(params[:id])\n end",
"def set_thread_subscription(thread_id, options = {})\n put \"notifications/threads/#{thread_id}/subscription\", options\n end",
"def patch_conversations_chat_participant_attributes(conversation_id, participant_id, body, opts = {})\n patch_conversations_chat_participant_attributes_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def conversation_member_roles=(value)\n @conversation_member_roles = value\n end",
"def my_participant_id=(value)\n @my_participant_id = value\n end",
"def sender_user_id=(value)\n @sender_user_id = value\n end",
"def threads=(value)\n value = value.to_i\n value = 1 if value < 1\n @threads = value\n end",
"def set_conversation_data(opts = {})\n uri = \"/v3/botstate/#{opts['channel_id']}/conversations/#{opts['conversation_id']}\"\n api_post(uri, opts['bot_data'])\n end",
"def set_threading\n @threading = Threading.find(params[:id])\n end",
"def set_debugger_thread(thr)\n raise TypeError, \"Must be another Thread\" unless thr.kind_of?(Thread)\n\n @debugger_thread = thr\n end",
"def assign_conversation\n self.conversation = parent.nil? ? Conversation.create(:sender => sender, :recipient => recipient) : parent.conversation\n #self.conversation = parent.nil? ? Conversation.create :\n # parent.conversation\n end",
"def set_post_thread\n @post_thread = PostThread.find(params[:id])\n end",
"def set_post_thread\n @post_thread = PostThread.find(params[:id])\n end",
"def set_activity_participant\n @activity_participant = current_user\n end",
"def set_chat_message\n @chat_message = ChatMessage.find(params[:id])\n end",
"def chat_id\n return @chat_id\n end",
"def chat_id\n return @chat_id\n end",
"def patch_conversation_participant(conversation_id, participant_id, body, opts = {})\n patch_conversation_participant_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def chat=(value)\n @chat = value\n end",
"def conversation_update\n if params[:message][:conversation_line_id]\n #if message_id set then edit the message\n message = Message.find(params[:message][:conversation_line_id])\n\n message.update(\n message_params.merge(updated_at: Time.at(params[:message][:timestamp].to_i))\n )\n\n # Update expiry date of messages on conversations lines\n message.conversation_line_messages.update(\n message_params.merge(updated_at: Time.at(params[:message][:timestamp].to_i))\n )\n\n message.current_user = current_user\n render json: {\n success: true\n }\n else\n render json: {\n success: false\n }\n end\n end",
"def set(*args)\n options = args.last.is_a?(Hash) ? args.pop : {}\n thread = args.first\n options.merge!(:thread => thread) if ([:ident, :link] & options.keys).empty?\n response = get('threads/set', options)\n end",
"def set_message\n @message = @app_thread.messages.find(params[:id])\n end"
] | [
"0.68099767",
"0.68099767",
"0.6674897",
"0.65233916",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.5958593",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.59405696",
"0.5720642",
"0.57191885",
"0.5673249",
"0.5673249",
"0.56172055",
"0.56172055",
"0.5592871",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.55329686",
"0.5522302",
"0.549646",
"0.5449447",
"0.5447142",
"0.5435934",
"0.5435934",
"0.53976977",
"0.5323116",
"0.53152895",
"0.52833176",
"0.5244497",
"0.5244497",
"0.5244497",
"0.5244497",
"0.5244497",
"0.5243711",
"0.51986396",
"0.51779604",
"0.51490325",
"0.5138908",
"0.5118542",
"0.50893795",
"0.5056177",
"0.505431",
"0.50531363",
"0.50428396",
"0.50068873",
"0.49884927",
"0.4971936",
"0.49601692",
"0.49430433",
"0.49132589",
"0.4905989",
"0.49026635",
"0.48468605",
"0.48161945",
"0.48083273",
"0.4795848",
"0.4762171",
"0.47568166",
"0.47335753",
"0.47110263",
"0.46964973",
"0.46956736",
"0.46883157",
"0.46883157",
"0.4671111",
"0.4665061",
"0.464571",
"0.464571",
"0.4641568",
"0.46394983",
"0.46205384",
"0.46171495",
"0.46115"
] | 0.8150457 | 0 |
Gets the extensions property value. The collection of open extensions defined for the post. Readonly. Nullable. Supports $expand. | def extensions
return @extensions
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extensions\n @@extensions\n end",
"def extensions\n @@extensions\n end",
"def extensions\n @extensions.to_a\n end",
"def extensions\n data.extensions\n end",
"def extensions\n @extensions ||= []\n end",
"def extensions\n @extensions ||= []\n end",
"def extensions\n @extensions ||= []\n end",
"def get_extensions\n get_siteinfo('extensions')['query']['extensions'].collect { |e| e['name'] }\n end",
"def extensions\n []\n end",
"def extensions\n []\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions\n @config[:extensions]\n end",
"def extensions\n self.class.extensions\n end",
"def extensions\n self.class.extensions\n end",
"def list\n @@extensions\n end",
"def extension_properties\n return @extension_properties\n end",
"def extensions=(value)\n subtags = Array(value).flatten\n self.extensions_sequence = subtags.empty? ? nil : subtags.join(HYPHEN)\n end",
"def extensions=(val)\n set_extensions(val)\n val\n end",
"def extensions\n if wrapper\n (wrapper.extensions + config[:extensions]).uniq\n else\n config[:extensions]\n end\n end",
"def extensions\n @extensions ||= Dir[File.join(@ext_dir, '*')]\n end",
"def extensions\r\n e = []\r\n @extensions.each_key do |k|\r\n e.push k\r\n end\r\n return e\r\n end",
"def extensions\n TYPE_EXTS[type] || []\n end",
"def extensions\n form_data = { 'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'extensions' }\n res = make_api_request(form_data)\n REXML::XPath.match(res, \"//ext\").inject(Hash.new) do |extensions, extension|\n name = extension.attributes[\"name\"] || \"\"\n extensions[name] = extension.attributes[\"version\"]\n extensions\n end\n end",
"def supported_extensions\n\t\treturn self.supported_extension_oids.collect {|oid| EXTENSION_NAMES[oid] || oid }\n\tend",
"def extensions\n unless @extensions\n @extensions={}\n cert.extensions.each {|e| @extensions[e.oid]=e.value} if cert.extensions\n end\n @extensions\n end",
"def extensions\n @extensions ||= @pathname.basename.to_s.scan(/\\.[^.]+/)\n end",
"def extensions\n trail.extensions.dup\n end",
"def extension(*extensions)\n if extensions[0].is_a?(Array)\n @_ext = extensions[0]\n else\n @_ext = extensions\n end\n end",
"def get_allowed_file_extensions\n get_siteinfo('fileextensions')['query']['fileextensions'].collect { |e| e['ext'] }\n end",
"def get_extension_tags\n get_siteinfo('extensiontags')['query']['extensiontags']\n end",
"def extensions\n @trail.extensions.dup\n end",
"def extension_strings\n\t\treturn nil if @extensions.empty?\n\t\treturn @extensions.compact.join('; ')\n\tend",
"def extension_fields\n self.class.extension_fields\n end",
"def get_extensions\n # no extensions to include\n ::Occi::Collection.new\n end",
"def extensions=(extensions)\n @extensions = Array extensions\n end",
"def file_extensions\n safe_const_get(:FILE_EXTENSIONS) || []\n end",
"def extensions\n source_node.xpath('.//Extension').to_a.collect do |node|\n Extension.new(node)\n end\n end",
"def set_extensions(val)\n @extensions = val\n build_path_query\n @extensions\n end",
"def extension_methods\n @@extension_methods\n end",
"def extension(key)\r\n return nil unless @extensions.has_key?(key)\r\n return @extensions[key]\r\n end",
"def extension_whitelist\r\n self.class.available_extensions\r\n end",
"def get_extensions\n read_extensions 'storage', @options.model_extensions_dir\n end",
"def extension_methods\n @@extension_methods\n end",
"def src_extensions(extensions = nil)\n extensions ? @src_extensions = extensions : get_inherited_attribute(\"@src_extensions\")\n end",
"def extension\n return _meta_data['extension'] if _meta_data.has_key? 'extension'\n ext\n end",
"def extensions\n if @extensions.nil?\n @extensions = Hash.new\n @cert.extensions.to_a.each { |extension|\n extension = extension.to_a\n if(!@extensions[extension[0]].kind_of?(Array)) then\n @extensions[extension[0]] = []\n end\n hash = {'value' => extension[1], 'critical' => extension[2]}\n @extensions[extension[0]].push hash\n }\n end\n @extensions\n end",
"def all_extensions\n r = []\n manager.Get.each do |ext|\n r << ext\n end\n r\n end",
"def extension\n extensions.last || \"\"\n end",
"def extensions\n @extensions ||= Protocol.const_get(:\"Version#{version}\")::Extensions.new(context, logger: logger)\n end",
"def extensions\n @extensions ||= Protocol.const_get(:\"Version#{version}\")::Extensions.new(context, logger: logger)\n end",
"def custom_workflow_extensions\n return @custom_workflow_extensions\n end",
"def extensions\n @extensions ||= Dir['ext/**/extconf.rb']\n end",
"def get_extensions\n read_extensions 'compute', @options.model_extensions_dir\n end",
"def extension_item\n return @children['extension-item'][:value]\n end",
"def extension_modules\n local_extensions =\n class <<self\n included_modules-Object.included_modules\n end\n if local_extensions.size > 0\n local_extensions\n else\n [] # There weren't any; must be a literal node\n end\n end",
"def extensions=(extensions); end",
"def get_extensions\n read_extensions 'network', @options.model_extensions_dir\n end",
"def include_extensions\n @included_extensions ||= include_extensions!\n end",
"def file_extensions\n [@file_extensions].flatten.compact.uniq\n end",
"def supported_extension_oids\n\t\treturn self.root_dse[:supportedExtension]\n\tend",
"def extensions\n\t\t(self.sip_accounts || []).map{ |sip_account| sip_account.extensions }.flatten\n\tend",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def extension_fields\n @extension_fields ||= ExtensionFields.new\n end",
"def getExtension(eType)\r\n return @langProfile.getExtension(eType)\r\n end",
"def supports_extensions?\n false\n end",
"def supports_extensions?\n false\n end",
"def getExtension(eType)\n return @langProfile.getExtension(eType)\n end",
"def getExtension(eType)\n return @langProfile.getExtension(eType)\n end",
"def extensions; end",
"def extensions; end",
"def extensions; end",
"def available_data_extensions\n @available_data_extensions ||= DataExtensionCollection.new\n end",
"def extension_attribute1\n return @extension_attribute1\n end",
"def extensions_present?\n\t\treturn !self.extensions.empty?\n\tend",
"def get_extensions\n collection = Occi::Collection.new\n BACKEND_TYPES.each { |backend_type| collection.merge! backend_instances[backend_type].get_extensions }\n collection\n end",
"def custom_workflow_extensions=(value)\n @custom_workflow_extensions = value\n end",
"def template_extensions\n EXTENSIONS.keys\n end",
"def extension(key)\n @extensions[key.to_s]\n end",
"def extensions\n extensions_size = MemoryPointer::new( :size_t )\n error = OpenCL.clGetPlatformInfo( self, EXTENSIONS, 0, nil, extensions_size)\n error_check(error)\n ext = MemoryPointer::new( extensions_size.read_size_t )\n error = OpenCL.clGetPlatformInfo( self, EXTENSIONS, extensions_size.read_size_t, ext, nil)\n error_check(error)\n ext_string = ext.read_string\n return ext_string.split(\" \")\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def md_extensions\n # empty for now\n {}\n end",
"def extension_whitelist\n Spotlight::Engine.config.allowed_upload_extensions\n end",
"def get_present_file_extensions\n ret = {}\n for attachment in self.get_attachments_for_display\n ext = mimetype_to_extension(attachment.content_type)\n ext = File.extname(attachment.filename).gsub(/^[.]/, \"\") if ext.nil? && !attachment.filename.nil?\n ret[ext] = 1 if !ext.nil?\n end\n return ret.keys.join(\" \")\n end",
"def extensions\n _, *segments = name.downcase.split(\".\", -1)\n\n segments.map.with_index do |segment, index|\n \".\" + segments[index..-1].join(\".\")\n end\n end",
"def extensions\n Extension.joins(:project, :assignment)\n .where(['assignments.lecturer_id = ?', @id])\n end",
"def extension_dependencies\n latest_extension_version.try(:extension_dependencies) || []\n end",
"def format_extensions\n @trail.extensions - @engines.keys\n end",
"def extension_attribute7\n return @extension_attribute7\n end",
"def extension\n @ext ||= ( ( e = File.extname( path ) ).empty? ? nil : e )\n end",
"def extension_allowlist\n end",
"def [](key)\n @extensions_for.\n find_all {|data_type, _| key.to_s.start_with?(data_type) }.\n flat_map {|_, extensions| extensions }\n end",
"def unknown_extensions\n Extensions.get_unknown_extensions(self.cert.extensions)\n end",
"def format_extension\n extensions.reverse.detect { |ext|\n @environment.mime_types(ext) && !@environment.engines(ext)\n }\n end",
"def extensions\n\t\teigenclass = ( class << self; self; end )\n\t\treturn eigenclass.included_modules.find_all do |mod|\n\t\t\t(class << mod; self; end).include?(Treequel::Model::ObjectClass)\n\t\tend\n\tend"
] | [
"0.6979415",
"0.6979415",
"0.6979126",
"0.6968796",
"0.6774288",
"0.6774288",
"0.6657614",
"0.6621277",
"0.65884674",
"0.65884674",
"0.65792555",
"0.65792555",
"0.65792555",
"0.65336263",
"0.6531385",
"0.6531385",
"0.64769137",
"0.6385995",
"0.6354665",
"0.6318663",
"0.6278254",
"0.6223952",
"0.6215708",
"0.62044907",
"0.6109141",
"0.60748535",
"0.6071381",
"0.6048474",
"0.6029185",
"0.59632796",
"0.5952152",
"0.59490895",
"0.59253705",
"0.59209144",
"0.5913062",
"0.59091836",
"0.5907535",
"0.5856059",
"0.5838401",
"0.5829843",
"0.5820863",
"0.5757775",
"0.57423955",
"0.56864333",
"0.56569153",
"0.5650201",
"0.5645955",
"0.5604945",
"0.5601996",
"0.5583832",
"0.55797213",
"0.55797213",
"0.5559963",
"0.5552319",
"0.55411977",
"0.5526155",
"0.5519751",
"0.5517674",
"0.5494159",
"0.5412535",
"0.540999",
"0.53955567",
"0.53332496",
"0.5313773",
"0.5307101",
"0.5300432",
"0.52671444",
"0.52671444",
"0.5249653",
"0.5249653",
"0.5249297",
"0.5249297",
"0.5249297",
"0.524656",
"0.52454036",
"0.52415985",
"0.52308303",
"0.5223272",
"0.5216699",
"0.52152944",
"0.52136916",
"0.52048975",
"0.52048975",
"0.52048975",
"0.5200151",
"0.5185681",
"0.5178602",
"0.5165993",
"0.5161357",
"0.51582456",
"0.51399726",
"0.513409",
"0.5094594",
"0.5065313",
"0.5055456",
"0.5052377",
"0.504855",
"0.5046048"
] | 0.7304631 | 2 |
Sets the extensions property value. The collection of open extensions defined for the post. Readonly. Nullable. Supports $expand. | def extensions=(value)
@extensions = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extensions=(extensions)\n @extensions = Array extensions\n end",
"def extensions=(val)\n set_extensions(val)\n val\n end",
"def extensions=(value)\n subtags = Array(value).flatten\n self.extensions_sequence = subtags.empty? ? nil : subtags.join(HYPHEN)\n end",
"def set_extensions(val)\n @extensions = val\n build_path_query\n @extensions\n end",
"def extensions=(extensions); end",
"def extension(*extensions)\n if extensions[0].is_a?(Array)\n @_ext = extensions[0]\n else\n @_ext = extensions\n end\n end",
"def extensions\n @extensions ||= []\n end",
"def extensions\n @extensions ||= []\n end",
"def extensions\n @extensions ||= []\n end",
"def src_extensions(extensions = nil)\n extensions ? @src_extensions = extensions : get_inherited_attribute(\"@src_extensions\")\n end",
"def custom_workflow_extensions=(value)\n @custom_workflow_extensions = value\n end",
"def add_extensions(*extensions)\n self.extensions += extensions\n end",
"def append_extensions(*extensions)\n self.extensions.push(*extensions)\n end",
"def add_extensions(exts)\n if exts.any?\n self.class.wrap(self, extensions + exts.to_a)\n else\n self\n end\n end",
"def add_extensions(exts)\n if exts.any?\n self.class.wrap(self, extensions + exts.to_a)\n else\n self\n end\n end",
"def extensions\n @@extensions\n end",
"def extensions\n @@extensions\n end",
"def extensions\n @extensions ||= Dir[File.join(@ext_dir, '*')]\n end",
"def extension=(extension)\n instance_set(:extension, extension)\n end",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def extensions_json=(value)\n @extensions_json = value.to_s\n @extensions = JSON.parse( File.read(@extensions_json) ) # Ruby raises it's own generic I/O read errors & JSON parse errors\n \n @extensions = {} unless @extensions.is_a?(Hash)\n \n @extensions_json\n end",
"def extensions\n return @extensions\n end",
"def extensions\n return @extensions\n end",
"def extensions\n return @extensions\n end",
"def extensions\n []\n end",
"def extensions\n []\n end",
"def extensions\n if wrapper\n (wrapper.extensions + config[:extensions]).uniq\n else\n config[:extensions]\n end\n end",
"def extensions\n unless @extensions\n @extensions={}\n cert.extensions.each {|e| @extensions[e.oid]=e.value} if cert.extensions\n end\n @extensions\n end",
"def smb_auto_encrypted_file_extensions=(value)\n @smb_auto_encrypted_file_extensions = value\n end",
"def extensions\n @extensions ||= @pathname.basename.to_s.scan(/\\.[^.]+/)\n end",
"def extensions\n @extensions ||= Dir['ext/**/extconf.rb']\n end",
"def extensions\n @config[:extensions]\n end",
"def set_extension\n @extension = Fonelator::Extension.find(params[:id])\n end",
"def extensions\n source_node.xpath('.//Extension').to_a.collect do |node|\n Extension.new(node)\n end\n end",
"def extensions\n form_data = { 'action' => 'query', 'meta' => 'siteinfo', 'siprop' => 'extensions' }\n res = make_api_request(form_data)\n REXML::XPath.match(res, \"//ext\").inject(Hash.new) do |extensions, extension|\n name = extension.attributes[\"name\"] || \"\"\n extensions[name] = extension.attributes[\"version\"]\n extensions\n end\n end",
"def extend_options\n options.each do |option|\n klass = Post::Extension.const_get(option.camelize)\n self.extend klass\n end\n end",
"def extensions\n @extensions ||= Protocol.const_get(:\"Version#{version}\")::Extensions.new(context, logger: logger)\n end",
"def extensions\n @extensions ||= Protocol.const_get(:\"Version#{version}\")::Extensions.new(context, logger: logger)\n end",
"def extensions\n self.class.extensions\n end",
"def extensions\n self.class.extensions\n end",
"def prepend_extensions(*extensions)\n self.extensions.unshift(*extensions)\n end",
"def extensions\n @extensions.to_a\n end",
"def extensions\n TYPE_EXTS[type] || []\n end",
"def include_extensions\n @included_extensions ||= include_extensions!\n end",
"def extensions\n data.extensions\n end",
"def extension_whitelist\n Spotlight::Engine.config.allowed_upload_extensions\n end",
"def extensions(range)\n @extension_tag = range\n end",
"def initialize(extensions: nil)\n @extensions = extensions || {}\n end",
"def extensions\n trail.extensions.dup\n end",
"def excluded_file_extensions_for_sync_app=(value)\n @excluded_file_extensions_for_sync_app = value\n end",
"def extension_whitelist\r\n self.class.available_extensions\r\n end",
"def set_extension\n @extension = Extension.find(params[:id])\n end",
"def set_extension\n @extension = Extension.find(params[:id])\n end",
"def set_extension\n @extension = Extension.find(params[:id])\n end",
"def extensions\n @trail.extensions.dup\n end",
"def list\n @@extensions\n end",
"def add_extension(extension)\n extensions.push extension\n end",
"def file_extensions\n safe_const_get(:FILE_EXTENSIONS) || []\n end",
"def formats=(extensions); end",
"def extension name, enable\n if enable then\n @extensions |= [name]\n else\n @extensions -= [name]\n end\n end",
"def get_extensions\n get_siteinfo('extensions')['query']['extensions'].collect { |e| e['name'] }\n end",
"def register_for_extensions(extensions)\n extensions = [*extensions]\n ExtensionMap.parsers ||= []\n ExtensionMap.parsers << self\n ExtensionMap.extensions_for ||= {}\n ExtensionMap.parsers_for ||= {}\n extensions.each do |extension|\n ExtensionMap.parsers_for[extension] ||= []\n ExtensionMap.parsers_for[extension] << self\n ExtensionMap.extensions_for[self] ||= []\n ExtensionMap.extensions_for[self] << extension\n end\n end",
"def extensions; end",
"def extensions; end",
"def extensions; end",
"def extension_attribute1=(value)\n @extension_attribute1 = value\n end",
"def get_allowed_file_extensions\n get_siteinfo('fileextensions')['query']['fileextensions'].collect { |e| e['ext'] }\n end",
"def extensions(range)\n @extension_fields = ExtensionFields.new(range)\n end",
"def update!(**args)\n @request_extensions = args[:request_extensions] if args.key?(:request_extensions)\n end",
"def extension=(mod)\n mod = Module.new(&mod) if Proc === mod\n @extension = mod\n end",
"def extension=(v) Axlsx::validate_string v; @extension = v end",
"def extension_params\n params.require(:extension).permit(:tipo, :nombre, :descripcion, :horas, :fecha_i, :fecha_f, :ano_periodo, :creador, user_ids: [])\n end",
"def extension_params\n params.require(:extension).permit(:name, :desc, :url, :image, :email, :published, :trending, :featured)\n end",
"def load_extension_inclusions\n return unless config_hash[\"Extensions\"]\n config_options[:extensions] = config_hash[\"Extensions\"]\n end",
"def supports_extensions?\n false\n end",
"def supports_extensions?\n false\n end",
"def extension_params\n params.require(:extension).permit(:name, :is_listed, :number)\n end",
"def available_data_extensions\n @available_data_extensions ||= DataExtensionCollection.new\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def extensions_sequence=(value)\n sequence = value ? value.to_str : nil\n if sequence && EXTENSIONS_SEQUENCE_REGEX !~ \"#{HYPHEN}#{sequence}\"\n raise InvalidComponentError, \"#{value.inspect} does not conform to the 'extensions' ABNF.\"\n end\n set_extensions_sequence(sequence)\n dirty\n validate\n end",
"def formats=(extensions)\n parameters[:format] = extensions.first.to_s\n set_header \"action_dispatch.request.formats\", extensions.collect { |extension|\n Mime::Type.lookup_by_extension(extension)\n }\n end",
"def extension=(mod)\n if Proc === mod\n mod = Module.new { define_method(:value, &mod) }\n end\n\n raise ArgumentError, \"Extension must be a Module\" unless Module === mod\n\n @extension = mod\n end",
"def extension_allowlist\n end",
"def extensions=(_arg0); end",
"def supported_extensions\n\t\treturn self.supported_extension_oids.collect {|oid| EXTENSION_NAMES[oid] || oid }\n\tend",
"def extension_white_list\n Ckeditor.attachment_file_types\n end",
"def extension_white_list\n Ckeditor.attachment_file_types\n end",
"def register(*extensions, &block)\n extensions.reject! { |e| self.extensions.include? e }\n super(*extensions, &block)\n end",
"def register(*extensions, &block)\n extensions.reject! { |e| self.extensions.include? e }\n super(*extensions, &block)\n end",
"def extensions\n if @extensions.nil?\n @extensions = Hash.new\n @cert.extensions.to_a.each { |extension|\n extension = extension.to_a\n if(!@extensions[extension[0]].kind_of?(Array)) then\n @extensions[extension[0]] = []\n end\n hash = {'value' => extension[1], 'critical' => extension[2]}\n @extensions[extension[0]].push hash\n }\n end\n @extensions\n end",
"def extension_attribute7=(value)\n @extension_attribute7 = value\n end",
"def extension_white_list\n if Rails.application.config.respond_to? :extension_allowed_list\n Rails.application.config.extension_allowed_list\n else\n %w( pdf doc docx xls xlsx ppt )\n end\n end",
"def register_as_default_for_extensions(extensions)\n ExtensionMap.default_parser_for ||= {}\n extensions = [*extensions]\n extensions.each do |extension|\n ExtensionMap.default_parser_for[extension.to_sym] = self\n end\n end",
"def enable_extension(name, **)\n end",
"def extension_params\n params.require(:extension).permit(:name, :description, :date_start, :date_end, :state_id, :manager_id, :extension_type_id, :career_id)\n end",
"def append_extensions(match, options={})\n unless Find::EXTENSIONS.include?(File.extname(match))\n match = match + '{' + Find::EXTENSIONS.join(',') + '}'\n end\n match\n end"
] | [
"0.7719422",
"0.7679009",
"0.76493114",
"0.75424236",
"0.697394",
"0.6780455",
"0.6521282",
"0.6521282",
"0.64599013",
"0.6401049",
"0.6389613",
"0.6346257",
"0.6161969",
"0.61548823",
"0.61548823",
"0.61243296",
"0.61243296",
"0.6102344",
"0.6090421",
"0.60843617",
"0.6061079",
"0.58523434",
"0.58523434",
"0.58523434",
"0.5828561",
"0.5828561",
"0.5819658",
"0.57672304",
"0.5749778",
"0.5746293",
"0.5740505",
"0.5718103",
"0.57063",
"0.5691004",
"0.56650054",
"0.56463027",
"0.56194896",
"0.56194896",
"0.5589713",
"0.5589713",
"0.55817884",
"0.5573672",
"0.5558918",
"0.5549106",
"0.5543968",
"0.55197036",
"0.5517555",
"0.5492727",
"0.54747707",
"0.5473232",
"0.5465869",
"0.5456554",
"0.5456554",
"0.5456554",
"0.544679",
"0.5424304",
"0.53359723",
"0.5334317",
"0.53292084",
"0.53204817",
"0.5316519",
"0.5314662",
"0.53066385",
"0.53066385",
"0.53066385",
"0.5296509",
"0.52443814",
"0.52400243",
"0.52317274",
"0.5225539",
"0.521143",
"0.51995844",
"0.519604",
"0.51764965",
"0.51620626",
"0.51620626",
"0.51520115",
"0.51411664",
"0.51381165",
"0.51381165",
"0.51381165",
"0.51310456",
"0.5123068",
"0.510557",
"0.5105535",
"0.5102722",
"0.5099202",
"0.507709",
"0.507709",
"0.5072096",
"0.5072096",
"0.5067315",
"0.5066841",
"0.50630933",
"0.5062449",
"0.50529134",
"0.50461805",
"0.5042843"
] | 0.80081564 | 2 |
Gets the from property value. The from property | def from
return @from
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_property(property_name)\n value = get() and value[property_name]\n end",
"def get_property _property\n send_cmd(\"get_property #{_property}\")\n end",
"def get_value(property)\n if @env_properties.get_value(property)\n return @env_properties.get_value(property)\n end\n @file_properties.get_value(property)\n end",
"def get_property(property)\n get_compound_value(get_value(property))\n end",
"def value\n @property_hash[:value]\n end",
"def get_value(property_path)\n element = @tree.select(property_path)[0]\n element.nil? ? nil : element.value\n end",
"def prop(name)\n properties.named(name).first.andand.value\n end",
"def get_property(prop)\r\n prop = URI.parse(prop) unless prop.is_a?(URI)\r\n\r\n each_property{|predicate, object|\r\n return object if predicate == prop\r\n }\r\n nil\r\n end",
"def [](property_name)\n properties[property_name.to_s].value\n end",
"def get_property(property, data, uri = nil, is_url: false, single: true, &block)\n values = data ? data[property] : nil\n if values.is_a?(Array)\n values = values.map { |value| get_property_value(value, is_url: is_url, &block) }\n single ? values[0] : values\n else\n value = get_property_value(values, is_url: is_url, &block)\n single ? value : [value]\n end\n end",
"def get(property_name)\n result.key?(property_name) ? result[property_name][1] : nil\n end",
"def value\n\t\t# This bizarre construct is done in order to not be reliant\n\t\t# on the inherent assignment-order when using Property.new({...})\n\t\t# since that hash can be ordered anywhich way .daniel\n\t\tif value_id\n\t\t\tvalue_object.value\n\t\telse\t\t\t\n\t\t\t@value\n\t\tend\n\tend",
"def get_property(property, data, is_url: false, single: true, &block)\n values = data ? data[property] : nil\n if values.is_a?(Array)\n values = values.map do |value|\n get_property_value(value, is_url: is_url, &block)\n end\n single ? values[0] : values\n else\n value = get_property_value(values, is_url: is_url, &block)\n single ? value : [value]\n end\n end",
"def get_property(key)\n self.fetch key\n end",
"def get(property)\n self.send(property.to_sym)\n end",
"def get(value)\n value\n end",
"def intersys_get(property)\n intersys_property(property).get\n end",
"def get_property_value(value, is_url: false)\n # Assume hash values are a type/value pair\n if value.is_a?(Hash)\n type = value['type']\n value = value['value']\n else\n type = nil\n end\n # Apply transformations to string properties\n value = transform(value, is_url: is_url) if value.is_a?(String)\n # If a block is present, return the result of the block\n return yield(value, type) if block_given?\n # Otherwise return the value\n value\n end",
"def get_property(*args)\n return unless alive?\n\n command(\"get_property\", *args)[\"data\"]\n end",
"def get_property_value(value, is_url: false, &block)\n # Assume hash values are a type/value pair\n if value.is_a?(Hash)\n value_type = value['type']\n value = value['value']\n else\n value_type = nil\n end\n # Apply transformations to string properties\n value = transform_property_value(value, value_type, is_url: is_url) if value.is_a?(String)\n # Return the value or the result of calling the given block on the value\n block ? block.call(value, value_type) : value\n end",
"def get_raw_property_value(name)\n return @property_values[name]\n end",
"def get_property(propertyName,exception = nil)\n propertyName = JS::String.create_with_utf8cstring(propertyName)\n res = super(context,self,propertyName,exception)\n\n \n val_ref = JS::Value.from_pointer_with_context(context,res)\n ret = val_ref.to_ruby\n if ret.is_a?(JS::Value)\n return check_use(ret) || is_self(ret) || ret\n else\n return check_use(ret) || ret\n end\n \n \n end",
"def get_property(name)\n $NEO_LOGGER.debug{\"get property '#{name}'\"}\n \n return nil if ! property?(name)\n value = @internal_node.get_property(name.to_s)\n if self.class.marshal?(name)\n Marshal.load(String.from_java_bytes(value))\n else\n value\n end\n end",
"def get_value name\n get name\n end",
"def fetch_property(name)\n properties.where(\"name = ?\", name).first\n end",
"def property\n @property\n end",
"def property_reader(property)\n define_method \"#{property}\" do\n value = instance_variable_get(\"@#{property}\")\n if value.nil? && partial? && persisted?\n instance_variable_get(\"@#{property}\")\n else\n value\n end\n end\n end",
"def get_property(property_name)\n command(\"get_property\", property_name)\n end",
"def get_property(name)\n return @properties[name]\n end",
"def get_property!(node, name)\n value = node.xpath(name).text\n raise \"missing property '#{name}' in node: #{node}\" if value.empty?\n value\n end",
"def get\n @value\n end",
"def get_property\n @xml = client.call(url).parsed_response.css('property').first\n @attributes.merge!(parse_xml_to_hash)\n end",
"def get(key)\n properties[key]\n end",
"def get_property(property)\n rows = weather_properties.select do |row|\n row.header.text.downcase == property\n end\n\n return nil if rows.empty?\n\n rows.first.value.text\n end",
"def property(value)\n merge(property: value.to_s)\n end",
"def get_property_value(name)\n property = get_property(name)\n \n # Throw error if property doesn't exist\n if property.nil?\n raise StandardError.new(\"Property does not exist: #{name}\")\n end\n\n # Return the property value\n property.get_value(self)\n end",
"def get_property(name, default= \"\")\n\t\treturn @transport.get_path(\"meta\",\"properties\", name) { default }\n\tend",
"def from_examination(ivar)\n value = instance_variable_get(ivar) and\n return value\n examine\n instance_variable_get(ivar)\n end",
"def get_property(key, default_value = nil)\n end",
"def get_property_at_index(propertyIndex,exception = nil)\n res = super(context,self,propertyIndex,exception)\n\n \n val_ref = JS::Value.from_pointer_with_context(context,res)\n ret = val_ref.to_ruby\n if ret.is_a?(JS::Value)\n return check_use(ret) || is_self(ret) || ret\n else\n return check_use(ret) || ret\n end\n \n \n end",
"def get(index)\n properties = index.split('.')\n get_value(properties, props)\n end",
"def get_property(key)\n @data[key]\n end",
"def property(name); end",
"def get_p(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def [](property)\n data[property]\n end",
"def get_p(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def get\n val\n end",
"def get\n val\n end",
"def property(name)\n ensure_valid\n return property_cache[name] if property_cache.include? name\n prop = self.class.properties[name]\n raise \"No such property #{name}\" if not prop\n property_cache[name] = prop.get(@model, @path)\n end",
"def getvalue\n @source.getvalue\n end",
"def value_get name\n instance_variable_get(:\"@#{name}\")\n end",
"def get_value_from(message_field)\n return if message_field.nil?\n\n message_field.public_send(*field_value)\n end",
"def property(name)\n get(\"/session/#{session_id}/element/#{element_id}/property/#{name}\").value\n end",
"def [](key)\n return unless property?(key)\n if @_wrapper and @_wrapper.class.marshal?(key)\n Marshal.load(String.from_java_bytes(get_property(key.to_s)))\n else\n get_property(key.to_s)\n end\n end",
"def value\n send property.type_field\n end",
"def get\n val = @source.get\n return nil if val.nil?\n processed_val = process(val)\n if processed_val.nil?\n get\n else\n return processed_val\n end\n\n end",
"def from\n @property[:from]\n end",
"def find(input, property, value); end",
"def get_property(property_id, options={})\n return send_message(SkyDB::Message::GetProperty.new(property_id, options))\n end",
"def get_value\n @value\n end",
"def get(value)\n return nil if value.nil?\n value.is_a?(self) ? value : self.new(value)\n end",
"def current_property\n @property = Property.find(params[:id])\n end",
"def css_value(locator, property)\n find_element(locator).css_value(property)\n end",
"def get_value\n value\n end",
"def get_value\n value\n end",
"def get_property(ctx,object,propertyName,exception)\n JS::Lib.JSObjectGetProperty(ctx,object,propertyName,exception)\n end",
"def attribute_get(name)\n \n name = name.to_sym\n \n if properties.has_key?(name)\n properties[name].get(self)\n else\n nil\n end\n \n end",
"def get_po(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def get_po(component, property, componentInfo=nil)\n return $marathon.getProperty(ComponentId.new(component, componentInfo), property)\nend",
"def find_property\n @property ||= scope.find(params[:id])\n end",
"def get_property(property_name)\n function = <<~JAVASCRIPT\n (object, propertyName) => {\n const result = {__proto__: null};\n result[propertyName] = object[propertyName];\n return result;\n }\n JAVASCRIPT\n object_handle = execution_context.evaluate_handle_function(function, self, property_name).value!\n properties = object_handle.get_properties\n result = properties[property_name]\n object_handle.dispose\n result\n end",
"def [](key); self.properties[key.to_s]; end",
"def get_property( propname )\n resp = conn.get('/users/'+name+'/props/'+propname+'/')\n \n case resp.code.to_i\n when 200\n return JSON.parse( resp.body )\n when 404\n case resp.header['resource-type']\n when 'user'\n raise RestAuthUserNotFound.new( resp )\n when 'property'\n raise RestAuthPropertyNotFound.new( resp )\n else\n raise RestAuthBadResponse.new( resp, \"Received 404 without Resource-Type header\" )\n end\n else\n raise RestAuthUnknownStatus.new( resp )\n end\n end",
"def [](property); end",
"def create_localised_property_getter(property)\n # meth = property.name\n class_eval <<-EOS, __FILE__, __LINE__ + 1\n def #{property.name}\n read_localised_attribute('#{property.name}')\n end\n EOS\n if ['boolean', TrueClass.to_s.downcase].include?(property.type.to_s.downcase)\n class_eval <<-EOS, __FILE__, __LINE__\n def #{property.name}?\n value = read_localised_attribute('#{property.name}')\n !(value.nil? || value == false)\n end\n EOS\n end\n end",
"def create_property_getter(property)\n # meth = property.name\n class_eval <<-EOS, __FILE__, __LINE__ + 1\n def #{property.name}\n self['#{property.name}']\n end\n EOS\n\n if property.type == 'boolean'\n class_eval <<-EOS, __FILE__, __LINE__\n def #{property.name}?\n if self['#{property.name}'].nil? || self['#{property.name}'] == false || self['#{property.name}'].to_s.downcase == 'false'\n false\n else\n true\n end\n end\n EOS\n end\n\n if property.alias\n class_eval <<-EOS, __FILE__, __LINE__ + 1\n alias #{property.alias.to_sym} #{property.name.to_sym}\n EOS\n end\n end",
"def property(name)\n @property_hash[name]\n end",
"def get_cis_by_property_value(property, value)\n get_cis_with_properties.select {|id, hash| hash.has_key? property and hash[property] == value }\n end",
"def value\n @value ||= extract_value\n end",
"def fetch\n @value\n end",
"def get_value\n @value \n end",
"def map_value(converted_value:)\n calling_mapper.for(\n Property.new(\n value.subject,\n value.key,\n converted_value,\n value.adapter,\n value.resource\n )\n ).result\n end",
"def value(attribute, document, property_hash)\n proc = property_hash[:read_proc]\n if proc\n proc.call(document)\n else\n document[attribute == :id ? :_id : attribute]\n # An alternate way is:\n #\n # document.send(attribute == :id ? :_id : attribute)\n #\n # This will work; however, it would be confusing to support\n # properties backed by model methods here if we don't do it \n # everywhere. And supporting it everywhere would be tricky.\n #\n # For example, the filtering code relies on using MongoDB to\n # search the database. If we supported properties backed by model\n # methods, filtering / searching would be more complicated and\n # expensive.\n end\n end",
"def [](term_or_property)\n get_values(term_or_property)\n end",
"def [](key)\n get_property(key)\n end",
"def property_converter\n FedoraValue\n end",
"def css_value(prop); end",
"def find_property\n unless self.property_name.blank?\n my_properties = Property.active.find_all_by_name(self.property_name)\n unless my_properties.blank? || my_properties.length != Array(self.property_name).length\n self.properties = my_properties\n else\n self.property_lookup_failed = true\n end\n end\n # be sure the call back returns true or else the call will fail with no error message\n # from the validation loop\n return true\n end",
"def get(param)\n @property_hash[param.intern] || :absent\n end",
"def source\n @value\n end",
"def get_property(name)\n if @properties[name]\n return @properties[name]\n elsif superdescriptor.is_a?(Descriptor)\n return superdescriptor.get_property(name)\n else\n # Search class-based hierarchy\n clazz = superdescriptor\n while !clazz.nil?\n property = PropertyRegistry.get_property(clazz, name)\n if property\n return property\n end\n \n clazz = clazz.superclass\n end\n end\n end",
"def get_value name=nil\n @value\n end",
"def property(name)\n (obj = @parameters[name.intern] and obj.is_a?(Puppet::Property)) ? obj : nil\n end",
"def value!\n @value\n end",
"def name_property\n p = properties.find { |n, p| p.name_property? }\n p ? p.first : nil\n end",
"def set_properties_value\r\n @properties_value = PropertiesValue.find(params[:id])\r\n end",
"def property?(name); end",
"def get(object); end",
"def set_property\n @property = current_store.properties.find(params[:id])\n end",
"def value\n if @value\n @value\n else\n @value = resolve( :value )\n end\n end",
"def set_property\n @property = Property.friendly.find(params[:id])\n end"
] | [
"0.69360006",
"0.6776098",
"0.675788",
"0.6733413",
"0.6686744",
"0.6662315",
"0.66568536",
"0.6504543",
"0.64941776",
"0.64657277",
"0.6423776",
"0.6418536",
"0.640316",
"0.6397828",
"0.6364443",
"0.63341135",
"0.63312095",
"0.63224614",
"0.6202996",
"0.6200379",
"0.6166773",
"0.61646754",
"0.6155309",
"0.6112944",
"0.6102949",
"0.61001396",
"0.6093785",
"0.6080615",
"0.6080531",
"0.60484904",
"0.59832484",
"0.5967707",
"0.5967042",
"0.59363025",
"0.59322405",
"0.5903081",
"0.5892553",
"0.5891682",
"0.5863489",
"0.5857503",
"0.5846103",
"0.5833487",
"0.582725",
"0.581793",
"0.58072937",
"0.5801177",
"0.57976955",
"0.57976955",
"0.5765699",
"0.5764955",
"0.5762264",
"0.57607514",
"0.57515115",
"0.5735156",
"0.57260776",
"0.57167876",
"0.5712161",
"0.5710448",
"0.5703541",
"0.5700636",
"0.5689806",
"0.56878376",
"0.56876266",
"0.5687356",
"0.5687356",
"0.5684004",
"0.5670392",
"0.56590563",
"0.56590563",
"0.56527805",
"0.56439334",
"0.5618829",
"0.5613512",
"0.5612218",
"0.56060773",
"0.55997187",
"0.55793417",
"0.5576603",
"0.5572287",
"0.55716455",
"0.55709594",
"0.55698025",
"0.55108047",
"0.54799175",
"0.54542446",
"0.545002",
"0.5440295",
"0.5435487",
"0.542872",
"0.5427409",
"0.5423918",
"0.54228544",
"0.5416174",
"0.54073125",
"0.54050136",
"0.5393326",
"0.53871983",
"0.53868467",
"0.5386789",
"0.5377339",
"0.53732264"
] | 0.0 | -1 |
Sets the from property value. The from property | def from=(value)
@from = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def from(val = nil)\n self.from = val if val\n @from\n end",
"def from(value)\n @from = value\n @options[:from] = value\n self\n end",
"def from=(val)\n @from = val ? Number.new(val, vanity: true) : nil\n end",
"def from=(from)\n write_attr :from, from\n end",
"def from=(from)\n if from.nil?\n fail ArgumentError, 'invalid value for \"from\", from cannot be nil.'\n end\n @from = from\n end",
"def from(from)\n @value[:from] = from\n self\n end",
"def from(*v)\n @from = v\n end",
"def from\n @property[:from]\n end",
"def set_from\n @from = From.find(params[:id])\n end",
"def from=( val )\n header[:from] = val\n end",
"def from( val = nil )\n default :from, val\n end",
"def from(value)\n merge(gadrfrom: value.to_s)\n end",
"def mbox_from=(value)\n @mbox_from = value\n end",
"def property(property_name, options = {})\n super\n\n if options[:from]\n if property_name == options[:from]\n fail ArgumentError, \"Property name (#{property_name}) and :from option must not be the same\"\n end\n\n translations_hash[options[:from]] ||= {}\n translations_hash[options[:from]][property_name] = options[:with] || options[:transform_with]\n\n define_method \"#{options[:from]}=\" do |val|\n self.class.translations_hash[options[:from]].each do |name, with|\n self[name] = with.respond_to?(:call) ? with.call(val) : val\n end\n end\n else\n if options[:transform_with].respond_to? :call\n transforms[property_name] = options[:transform_with]\n end\n end\n end",
"def initialize_copy(from)\n @set = from.instance_variable_get('@set').clone\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def set_FromDateTime(value)\n set_input(\"FromDateTime\", value)\n end",
"def from=(location)\n if location then\n self.from_container = location.container\n self.from_row = location.row\n self.from_column = location.column\n end\n location\n end",
"def from\n attributes.fetch(:from)\n end",
"def date_from=(date_from)\n if !date_from.nil? && date_from.to_s.length > 10\n fail ArgumentError, 'invalid value for \"date_from\", the character length must be smaller than or equal to 10.'\n end\n\n if !date_from.nil? && date_from.to_s.length < 10\n fail ArgumentError, 'invalid value for \"date_from\", the character length must be great than or equal to 10.'\n end\n\n @date_from = date_from\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def set_FromIndex(value)\n set_input(\"FromIndex\", value)\n end",
"def from(value)\n merge(culfrom: value.iso8601)\n end",
"def fromid(value)\n merge(fromid: value.to_s)\n end",
"def fromid(value)\n merge(fromid: value.to_s)\n end",
"def to=(v)\n check_to(v)\n set_to(v)\n v\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def from=(address)\n self.sender = address\n end",
"def set_to(value)\n @native.set_current_value(value)\n end",
"def value=(value)\n @object.instance_variable_set(:\"@#{@name}\",coerce(value))\n end",
"def set_Property(value)\n set_input(\"Property\", value)\n end",
"def set_property\n @property = Property.all\n end",
"def from_addresses=(value)\n @from_addresses = value\n end",
"def processed_from\n ObjectPatch::Pointer.parse(@from)\n end",
"def resent_from=( val )\n header[:resent_from] = val\n end",
"def load(value)\n @value = value\n end",
"def from=(addresses)\n address_list_assign('From', addresses)\n end",
"def from_b!\n self.replace(from_b)\n end",
"def set_property!(property_name, value)\n set_property(property_name, value).data!\n end",
"def set_audio_status_from_to(from_value, to_value)\n @audio.each { |key, value| value.status = to_value if value.status == from_value }\n end",
"def set_to(v)\n @to = v\n end",
"def set(path, value, source)\n my_path = path.clone\n return source.send(my_path[0].to_s + \"=\",value) if my_path.length == 1\n target = my_path.shift\n set(my_path, value, source.send(target))\n end",
"def update_from(other)\n @name = other.name\n @position = other.position\n @topic = other.topic\n @recipients = other.recipients\n @bitrate = other.bitrate\n @user_limit = other.user_limit\n @permission_overwrites = other.permission_overwrites\n @nsfw = other.nsfw\n @parent_id = other.parent_id\n @rate_limit_per_user = other.rate_limit_per_user\n end",
"def set_from_email_address\n self.from_user_email ||= Settings::NotificationsFromEmail || user&.email\n end",
"def assign_property(name, value); end",
"def from\n @from\n end",
"def from\n @_from ||= payload.is_a?(Hash) ? payload['from'] : payload.try(:from)\n end",
"def update!(**args)\n @property_value = args[:property_value] if args.key?(:property_value)\n end",
"def set_properties_value\r\n @properties_value = PropertiesValue.find(params[:id])\r\n end",
"def set_property\n @property = Property.find_by(id: params[:id])\n end",
"def set_value\n if resolver.params.key?(name) && options.key?(resolver.params[name])\n self.value = resolver.params[name]\n else\n self.value = attributes.delete(:value)\n end\n end",
"def from\n @from == -Float::INFINITY ? nil : @from\n end",
"def initialize_properties_with_values_from_owner(entity)\n entity\n end",
"def set_value(owner, value)\n values = value.to_s.split(/ +/)\n \n @properties.each_index do |index|\n break if index > values.length-1\n subproperties = @properties[index]\n\n subproperties.each do |subproperty|\n owner.set_property_value(subproperty, values[index])\n end\n end\n end",
"def set_property\n @property = current_client.properties.find(params[:id])\n end",
"def set_raw_property_value(name, value)\n @property_values[name] = value\n end",
"def from(value)\n using(partition: value)\n end",
"def as(value)\n @value = value\n end",
"def value(value)\n\t\t@value=value\n\tend",
"def from(*source)\n clone(:from => source)\n end",
"def set_from(album)\n @artist = library.artists.add_or_retrieve(album.artist)\n @catalogue_id = album.catalogue_id.dup\n \n if album.format\n @format = library.formats.add_or_retrieve(album.format)\n else\n @format = library.formats['Unknown/Other']\n end\n \n @media_quantity = album.media_quantity \n @title = album.title.dup\n @release_date = album.release_date\n \n clear_tracks()\n \n album.tracks.each do |track|\n new_track = Track.new(self)\n new_track.set_from(track)\n end\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_property\n @property = Property.find(params[:id])\n end",
"def set_from_other(other)\n set_values(other.nv, other.dv, other.snv, other.sdv)\n end",
"def update!(**args)\n @source_property = args[:source_property] if args.key?(:source_property)\n end",
"def set_StartFrom(value)\n set_input(\"StartFrom\", value)\n end",
"def set_StartFrom(value)\n set_input(\"StartFrom\", value)\n end",
"def set_StartFrom(value)\n set_input(\"StartFrom\", value)\n end",
"def set_StartFrom(value)\n set_input(\"StartFrom\", value)\n end"
] | [
"0.74252534",
"0.734053",
"0.7271644",
"0.7140039",
"0.7095981",
"0.7080337",
"0.68769306",
"0.6429729",
"0.63151515",
"0.6152368",
"0.60757756",
"0.6034785",
"0.585997",
"0.5854877",
"0.576354",
"0.57218987",
"0.57218987",
"0.57218987",
"0.57218987",
"0.57218987",
"0.56635886",
"0.5655891",
"0.5619769",
"0.55602974",
"0.55602974",
"0.55602974",
"0.55602974",
"0.55602974",
"0.55357546",
"0.5503491",
"0.5503491",
"0.54631144",
"0.54446805",
"0.54446805",
"0.54196805",
"0.53882414",
"0.5365587",
"0.5362926",
"0.53616726",
"0.5357171",
"0.5317142",
"0.5304547",
"0.5284826",
"0.52717066",
"0.52642703",
"0.524568",
"0.52386206",
"0.5225948",
"0.5221392",
"0.52079916",
"0.5204701",
"0.5190127",
"0.5176101",
"0.5168456",
"0.516476",
"0.51620036",
"0.5159624",
"0.51590973",
"0.51302683",
"0.51192534",
"0.51139385",
"0.5113053",
"0.51110333",
"0.5095382",
"0.50857925",
"0.5075887",
"0.50688887",
"0.505756",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50491965",
"0.50375545",
"0.5035225",
"0.5032618",
"0.5032618",
"0.5032618",
"0.5032618"
] | 0.7613539 | 1 |
The deserialization information for the current model | def get_field_deserializers()
return super.merge({
"attachments" => lambda {|n| @attachments = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Attachment.create_from_discriminator_value(pn) }) },
"body" => lambda {|n| @body = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ItemBody.create_from_discriminator_value(pn) }) },
"conversationId" => lambda {|n| @conversation_id = n.get_string_value() },
"conversationThreadId" => lambda {|n| @conversation_thread_id = n.get_string_value() },
"extensions" => lambda {|n| @extensions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Extension.create_from_discriminator_value(pn) }) },
"from" => lambda {|n| @from = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },
"hasAttachments" => lambda {|n| @has_attachments = n.get_boolean_value() },
"inReplyTo" => lambda {|n| @in_reply_to = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Post.create_from_discriminator_value(pn) }) },
"multiValueExtendedProperties" => lambda {|n| @multi_value_extended_properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::MultiValueLegacyExtendedProperty.create_from_discriminator_value(pn) }) },
"newParticipants" => lambda {|n| @new_participants = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },
"receivedDateTime" => lambda {|n| @received_date_time = n.get_date_time_value() },
"sender" => lambda {|n| @sender = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },
"singleValueExtendedProperties" => lambda {|n| @single_value_extended_properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::SingleValueLegacyExtendedProperty.create_from_discriminator_value(pn) }) },
})
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def deserialized\n @deserialized ||= @serializer.deserialize @serialized_object\n end",
"def get_field_deserializers()\n return super.merge({\n \"detectionStatus\" => lambda {|n| @detection_status = n.get_enum_value(MicrosoftGraph::Models::SecurityDetectionStatus) },\n \"fileDetails\" => lambda {|n| @file_details = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityFileDetails.create_from_discriminator_value(pn) }) },\n \"mdeDeviceId\" => lambda {|n| @mde_device_id = n.get_string_value() },\n })\n end",
"def serialized_attributes\n read_inheritable_attribute(\"attr_serialized\") || { }\n end",
"def get_field_deserializers()\n return super.merge({\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"file\" => lambda {|n| @file = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"fileHash\" => lambda {|n| @file_hash = n.get_string_value() },\n \"version\" => lambda {|n| @version = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"committedContentVersion\" => lambda {|n| @committed_content_version = n.get_string_value() },\n \"contentVersions\" => lambda {|n| @content_versions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::MobileAppContent.create_from_discriminator_value(pn) }) },\n \"fileName\" => lambda {|n| @file_name = n.get_string_value() },\n \"size\" => lambda {|n| @size = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return {\n \"attribution\" => lambda {|n| @attribution = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ImageInfo.create_from_discriminator_value(pn) }) },\n \"backgroundColor\" => lambda {|n| @background_color = n.get_string_value() },\n \"content\" => lambda {|n| @content = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"displayText\" => lambda {|n| @display_text = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"detectionType\" => lambda {|n| @detection_type = n.get_string_value() },\n \"method\" => lambda {|n| @method = n.get_string_value() },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"lastModifiedDateTime\" => lambda {|n| @last_modified_date_time = n.get_date_time_value() },\n \"resource\" => lambda {|n| @resource = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Entity.create_from_discriminator_value(pn) }) },\n \"resourceReference\" => lambda {|n| @resource_reference = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ResourceReference.create_from_discriminator_value(pn) }) },\n \"resourceVisualization\" => lambda {|n| @resource_visualization = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ResourceVisualization.create_from_discriminator_value(pn) }) },\n \"weight\" => lambda {|n| @weight = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return {\n \"isEnabled\" => lambda {|n| @is_enabled = n.get_boolean_value() },\n \"maxImageSize\" => lambda {|n| @max_image_size = n.get_number_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"timeout\" => lambda {|n| @timeout = n.get_duration_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"contentData\" => lambda {|n| @content_data = n.get_string_value() },\n \"fileName\" => lambda {|n| @file_name = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"applicationVersion\" => lambda {|n| @application_version = n.get_string_value() },\n \"headerValue\" => lambda {|n| @header_value = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"callEndSubReason\" => lambda {|n| @call_end_sub_reason = n.get_number_value() },\n \"callType\" => lambda {|n| @call_type = n.get_string_value() },\n \"calleeNumber\" => lambda {|n| @callee_number = n.get_string_value() },\n \"callerNumber\" => lambda {|n| @caller_number = n.get_string_value() },\n \"correlationId\" => lambda {|n| @correlation_id = n.get_string_value() },\n \"duration\" => lambda {|n| @duration = n.get_number_value() },\n \"endDateTime\" => lambda {|n| @end_date_time = n.get_date_time_value() },\n \"failureDateTime\" => lambda {|n| @failure_date_time = n.get_date_time_value() },\n \"finalSipCode\" => lambda {|n| @final_sip_code = n.get_number_value() },\n \"finalSipCodePhrase\" => lambda {|n| @final_sip_code_phrase = n.get_string_value() },\n \"id\" => lambda {|n| @id = n.get_string_value() },\n \"inviteDateTime\" => lambda {|n| @invite_date_time = n.get_date_time_value() },\n \"mediaBypassEnabled\" => lambda {|n| @media_bypass_enabled = n.get_boolean_value() },\n \"mediaPathLocation\" => lambda {|n| @media_path_location = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"signalingLocation\" => lambda {|n| @signaling_location = n.get_string_value() },\n \"startDateTime\" => lambda {|n| @start_date_time = n.get_date_time_value() },\n \"successfulCall\" => lambda {|n| @successful_call = n.get_boolean_value() },\n \"trunkFullyQualifiedDomainName\" => lambda {|n| @trunk_fully_qualified_domain_name = n.get_string_value() },\n \"userDisplayName\" => lambda {|n| @user_display_name = n.get_string_value() },\n \"userId\" => lambda {|n| @user_id = n.get_string_value() },\n \"userPrincipalName\" => lambda {|n| @user_principal_name = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"isCourseActivitySyncEnabled\" => lambda {|n| @is_course_activity_sync_enabled = n.get_boolean_value() },\n \"learningContents\" => lambda {|n| @learning_contents = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::LearningContent.create_from_discriminator_value(pn) }) },\n \"learningCourseActivities\" => lambda {|n| @learning_course_activities = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::LearningCourseActivity.create_from_discriminator_value(pn) }) },\n \"loginWebUrl\" => lambda {|n| @login_web_url = n.get_string_value() },\n \"longLogoWebUrlForDarkTheme\" => lambda {|n| @long_logo_web_url_for_dark_theme = n.get_string_value() },\n \"longLogoWebUrlForLightTheme\" => lambda {|n| @long_logo_web_url_for_light_theme = n.get_string_value() },\n \"squareLogoWebUrlForDarkTheme\" => lambda {|n| @square_logo_web_url_for_dark_theme = n.get_string_value() },\n \"squareLogoWebUrlForLightTheme\" => lambda {|n| @square_logo_web_url_for_light_theme = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"itemId\" => lambda {|n| @item_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"title\" => lambda {|n| @title = n.get_string_value() },\n \"versionId\" => lambda {|n| @version_id = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"buildNumber\" => lambda {|n| @build_number = n.get_string_value() },\n \"bundleId\" => lambda {|n| @bundle_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"versionNumber\" => lambda {|n| @version_number = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"parentNotebook\" => lambda {|n| @parent_notebook = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Notebook.create_from_discriminator_value(pn) }) },\n \"parentSectionGroup\" => lambda {|n| @parent_section_group = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SectionGroup.create_from_discriminator_value(pn) }) },\n \"sectionGroups\" => lambda {|n| @section_groups = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::SectionGroup.create_from_discriminator_value(pn) }) },\n \"sectionGroupsUrl\" => lambda {|n| @section_groups_url = n.get_string_value() },\n \"sections\" => lambda {|n| @sections = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::OnenoteSection.create_from_discriminator_value(pn) }) },\n \"sectionsUrl\" => lambda {|n| @sections_url = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"appDisplayName\" => lambda {|n| @app_display_name = n.get_string_value() },\n \"dataType\" => lambda {|n| @data_type = n.get_string_value() },\n \"isSyncedFromOnPremises\" => lambda {|n| @is_synced_from_on_premises = n.get_boolean_value() },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"targetObjects\" => lambda {|n| @target_objects = n.get_collection_of_primitive_values(String) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"detectionStatus\" => lambda {|n| @detection_status = n.get_enum_value(MicrosoftGraph::Models::SecurityDetectionStatus) },\n \"imageFile\" => lambda {|n| @image_file = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityFileDetails.create_from_discriminator_value(pn) }) },\n \"mdeDeviceId\" => lambda {|n| @mde_device_id = n.get_string_value() },\n \"parentProcessCreationDateTime\" => lambda {|n| @parent_process_creation_date_time = n.get_date_time_value() },\n \"parentProcessId\" => lambda {|n| @parent_process_id = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"parentProcessImageFile\" => lambda {|n| @parent_process_image_file = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityFileDetails.create_from_discriminator_value(pn) }) },\n \"processCommandLine\" => lambda {|n| @process_command_line = n.get_string_value() },\n \"processCreationDateTime\" => lambda {|n| @process_creation_date_time = n.get_date_time_value() },\n \"processId\" => lambda {|n| @process_id = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"userAccount\" => lambda {|n| @user_account = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityUserAccount.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"clientContext\" => lambda {|n| @client_context = n.get_string_value() },\n \"resultInfo\" => lambda {|n| @result_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ResultInfo.create_from_discriminator_value(pn) }) },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::OperationStatus) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"completedDateTime\" => lambda {|n| @completed_date_time = n.get_date_time_value() },\n \"progress\" => lambda {|n| @progress = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::DataPolicyOperationStatus) },\n \"storageLocation\" => lambda {|n| @storage_location = n.get_string_value() },\n \"submittedDateTime\" => lambda {|n| @submitted_date_time = n.get_date_time_value() },\n \"userId\" => lambda {|n| @user_id = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"completedUnits\" => lambda {|n| @completed_units = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"progressObservationDateTime\" => lambda {|n| @progress_observation_date_time = n.get_date_time_value() },\n \"totalUnits\" => lambda {|n| @total_units = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"units\" => lambda {|n| @units = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"details\" => lambda {|n| @details = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::DetailsInfo.create_from_discriminator_value(pn) }) },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"provisioningStepType\" => lambda {|n| @provisioning_step_type = n.get_enum_value(MicrosoftGraph::Models::ProvisioningStepType) },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::ProvisioningResult) },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"downloadUri\" => lambda {|n| @download_uri = n.get_string_value() },\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"fulfilledDateTime\" => lambda {|n| @fulfilled_date_time = n.get_date_time_value() },\n \"reviewHistoryPeriodEndDateTime\" => lambda {|n| @review_history_period_end_date_time = n.get_date_time_value() },\n \"reviewHistoryPeriodStartDateTime\" => lambda {|n| @review_history_period_start_date_time = n.get_date_time_value() },\n \"runDateTime\" => lambda {|n| @run_date_time = n.get_date_time_value() },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::AccessReviewHistoryStatus) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"check32BitOn64System\" => lambda {|n| @check32_bit_on64_system = n.get_boolean_value() },\n \"comparisonValue\" => lambda {|n| @comparison_value = n.get_string_value() },\n \"fileOrFolderName\" => lambda {|n| @file_or_folder_name = n.get_string_value() },\n \"operationType\" => lambda {|n| @operation_type = n.get_enum_value(MicrosoftGraph::Models::Win32LobAppFileSystemOperationType) },\n \"operator\" => lambda {|n| @operator = n.get_enum_value(MicrosoftGraph::Models::Win32LobAppRuleOperator) },\n \"path\" => lambda {|n| @path = n.get_string_value() },\n })\n end",
"def read_object\n if @version == 0\n return amf0_deserialize\n else\n return amf3_deserialize\n end\n end",
"def get_field_deserializers()\n return {\n \"destinationFileName\" => lambda {|n| @destination_file_name = n.get_string_value() },\n \"sourceFile\" => lambda {|n| @source_file = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ItemReference.create_from_discriminator_value(pn) }) },\n }\n end",
"def get_field_deserializers()\n return {\n \"newText\" => lambda {|n| @new_text = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n \"numBytes\" => lambda {|n| @num_bytes = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n \"oldText\" => lambda {|n| @old_text = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n \"startNum\" => lambda {|n| @start_num = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"audioDeviceName\" => lambda {|n| @audio_device_name = n.get_string_value() },\n \"bookingType\" => lambda {|n| @booking_type = n.get_enum_value(MicrosoftGraph::Models::BookingType) },\n \"building\" => lambda {|n| @building = n.get_string_value() },\n \"capacity\" => lambda {|n| @capacity = n.get_number_value() },\n \"displayDeviceName\" => lambda {|n| @display_device_name = n.get_string_value() },\n \"emailAddress\" => lambda {|n| @email_address = n.get_string_value() },\n \"floorLabel\" => lambda {|n| @floor_label = n.get_string_value() },\n \"floorNumber\" => lambda {|n| @floor_number = n.get_number_value() },\n \"isWheelChairAccessible\" => lambda {|n| @is_wheel_chair_accessible = n.get_boolean_value() },\n \"label\" => lambda {|n| @label = n.get_string_value() },\n \"nickname\" => lambda {|n| @nickname = n.get_string_value() },\n \"tags\" => lambda {|n| @tags = n.get_collection_of_primitive_values(String) },\n \"videoDeviceName\" => lambda {|n| @video_device_name = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"id\" => lambda {|n| @id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"targetType\" => lambda {|n| @target_type = n.get_enum_value(MicrosoftGraph::Models::FeatureTargetType) },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"deviceCount\" => lambda {|n| @device_count = n.get_number_value() },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"managedDevices\" => lambda {|n| @managed_devices = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ManagedDevice.create_from_discriminator_value(pn) }) },\n \"platform\" => lambda {|n| @platform = n.get_enum_value(MicrosoftGraph::Models::DetectedAppPlatformType) },\n \"publisher\" => lambda {|n| @publisher = n.get_string_value() },\n \"sizeInByte\" => lambda {|n| @size_in_byte = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"version\" => lambda {|n| @version = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"activationUrl\" => lambda {|n| @activation_url = n.get_string_value() },\n \"activitySourceHost\" => lambda {|n| @activity_source_host = n.get_string_value() },\n \"appActivityId\" => lambda {|n| @app_activity_id = n.get_string_value() },\n \"appDisplayName\" => lambda {|n| @app_display_name = n.get_string_value() },\n \"contentInfo\" => lambda {|n| @content_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n \"contentUrl\" => lambda {|n| @content_url = n.get_string_value() },\n \"createdDateTime\" => lambda {|n| @created_date_time = n.get_date_time_value() },\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"fallbackUrl\" => lambda {|n| @fallback_url = n.get_string_value() },\n \"historyItems\" => lambda {|n| @history_items = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ActivityHistoryItem.create_from_discriminator_value(pn) }) },\n \"lastModifiedDateTime\" => lambda {|n| @last_modified_date_time = n.get_date_time_value() },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::Status) },\n \"userTimezone\" => lambda {|n| @user_timezone = n.get_string_value() },\n \"visualElements\" => lambda {|n| @visual_elements = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::VisualInfo.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"category\" => lambda {|n| @category = n.get_string_value() },\n \"firstSeenDateTime\" => lambda {|n| @first_seen_date_time = n.get_date_time_value() },\n \"host\" => lambda {|n| @host = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityHost.create_from_discriminator_value(pn) }) },\n \"lastSeenDateTime\" => lambda {|n| @last_seen_date_time = n.get_date_time_value() },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"version\" => lambda {|n| @version = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"deviceCount\" => lambda {|n| @device_count = n.get_number_value() },\n \"lastUpdateDateTime\" => lambda {|n| @last_update_date_time = n.get_date_time_value() },\n \"malwareIdentifier\" => lambda {|n| @malware_identifier = n.get_string_value() },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"lastActionDateTime\" => lambda {|n| @last_action_date_time = n.get_date_time_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"operation\" => lambda {|n| @operation = n.get_string_value() },\n \"status\" => lambda {|n| @status = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"details\" => lambda {|n| @details = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::DetailsInfo.create_from_discriminator_value(pn) }) },\n \"identityType\" => lambda {|n| @identity_type = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"dataLocationCode\" => lambda {|n| @data_location_code = n.get_string_value() },\n \"hostname\" => lambda {|n| @hostname = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"root\" => lambda {|n| @root = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Root.create_from_discriminator_value(pn) }) },\n }\n end",
"def get_field_deserializers()\n return {\n \"address\" => lambda {|n| @address = n.get_string_value() },\n \"itemId\" => lambda {|n| @item_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"relevanceScore\" => lambda {|n| @relevance_score = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"selectionLikelihood\" => lambda {|n| @selection_likelihood = n.get_enum_value(MicrosoftGraph::Models::SelectionLikelihoodInfo) },\n }\n end",
"def get_field_deserializers()\n return {\n \"hashes\" => lambda {|n| @hashes = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Hashes.create_from_discriminator_value(pn) }) },\n \"mimeType\" => lambda {|n| @mime_type = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"processingMetadata\" => lambda {|n| @processing_metadata = n.get_boolean_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"configurationVersion\" => lambda {|n| @configuration_version = n.get_number_value() },\n \"errorCount\" => lambda {|n| @error_count = n.get_number_value() },\n \"failedCount\" => lambda {|n| @failed_count = n.get_number_value() },\n \"lastUpdateDateTime\" => lambda {|n| @last_update_date_time = n.get_date_time_value() },\n \"notApplicableCount\" => lambda {|n| @not_applicable_count = n.get_number_value() },\n \"pendingCount\" => lambda {|n| @pending_count = n.get_number_value() },\n \"successCount\" => lambda {|n| @success_count = n.get_number_value() },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"format\" => lambda {|n| @format = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::WorkbookChartDataLabelFormat.create_from_discriminator_value(pn) }) },\n \"position\" => lambda {|n| @position = n.get_string_value() },\n \"separator\" => lambda {|n| @separator = n.get_string_value() },\n \"showBubbleSize\" => lambda {|n| @show_bubble_size = n.get_boolean_value() },\n \"showCategoryName\" => lambda {|n| @show_category_name = n.get_boolean_value() },\n \"showLegendKey\" => lambda {|n| @show_legend_key = n.get_boolean_value() },\n \"showPercentage\" => lambda {|n| @show_percentage = n.get_boolean_value() },\n \"showSeriesName\" => lambda {|n| @show_series_name = n.get_boolean_value() },\n \"showValue\" => lambda {|n| @show_value = n.get_boolean_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"errorDetails\" => lambda {|n| @error_details = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::GenericError.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"sourceId\" => lambda {|n| @source_id = n.get_string_value() },\n \"targetId\" => lambda {|n| @target_id = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"contentSource\" => lambda {|n| @content_source = n.get_string_value() },\n \"hitId\" => lambda {|n| @hit_id = n.get_string_value() },\n \"isCollapsed\" => lambda {|n| @is_collapsed = n.get_boolean_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"rank\" => lambda {|n| @rank = n.get_number_value() },\n \"resource\" => lambda {|n| @resource = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Entity.create_from_discriminator_value(pn) }) },\n \"resultTemplateId\" => lambda {|n| @result_template_id = n.get_string_value() },\n \"summary\" => lambda {|n| @summary = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"assignedUserPrincipalName\" => lambda {|n| @assigned_user_principal_name = n.get_string_value() },\n \"groupTag\" => lambda {|n| @group_tag = n.get_string_value() },\n \"hardwareIdentifier\" => lambda {|n| @hardware_identifier = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"importId\" => lambda {|n| @import_id = n.get_string_value() },\n \"productKey\" => lambda {|n| @product_key = n.get_string_value() },\n \"serialNumber\" => lambda {|n| @serial_number = n.get_string_value() },\n \"state\" => lambda {|n| @state = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ImportedWindowsAutopilotDeviceIdentityState.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"audioRoutingGroups\" => lambda {|n| @audio_routing_groups = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::AudioRoutingGroup.create_from_discriminator_value(pn) }) },\n \"callChainId\" => lambda {|n| @call_chain_id = n.get_string_value() },\n \"callOptions\" => lambda {|n| @call_options = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::CallOptions.create_from_discriminator_value(pn) }) },\n \"callRoutes\" => lambda {|n| @call_routes = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::CallRoute.create_from_discriminator_value(pn) }) },\n \"callbackUri\" => lambda {|n| @callback_uri = n.get_string_value() },\n \"chatInfo\" => lambda {|n| @chat_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ChatInfo.create_from_discriminator_value(pn) }) },\n \"contentSharingSessions\" => lambda {|n| @content_sharing_sessions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ContentSharingSession.create_from_discriminator_value(pn) }) },\n \"direction\" => lambda {|n| @direction = n.get_enum_value(MicrosoftGraph::Models::CallDirection) },\n \"incomingContext\" => lambda {|n| @incoming_context = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::IncomingContext.create_from_discriminator_value(pn) }) },\n \"mediaConfig\" => lambda {|n| @media_config = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::MediaConfig.create_from_discriminator_value(pn) }) },\n \"mediaState\" => lambda {|n| @media_state = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::CallMediaState.create_from_discriminator_value(pn) }) },\n \"meetingInfo\" => lambda {|n| @meeting_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::MeetingInfo.create_from_discriminator_value(pn) }) },\n \"myParticipantId\" => lambda {|n| @my_participant_id = n.get_string_value() },\n \"operations\" => lambda {|n| @operations = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::CommsOperation.create_from_discriminator_value(pn) }) },\n \"participants\" => lambda {|n| @participants = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Participant.create_from_discriminator_value(pn) }) },\n \"requestedModalities\" => lambda {|n| @requested_modalities = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Modality.create_from_discriminator_value(pn) }) },\n \"resultInfo\" => lambda {|n| @result_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ResultInfo.create_from_discriminator_value(pn) }) },\n \"source\" => lambda {|n| @source = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ParticipantInfo.create_from_discriminator_value(pn) }) },\n \"state\" => lambda {|n| @state = n.get_enum_value(MicrosoftGraph::Models::CallState) },\n \"subject\" => lambda {|n| @subject = n.get_string_value() },\n \"targets\" => lambda {|n| @targets = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::InvitationParticipantInfo.create_from_discriminator_value(pn) }) },\n \"tenantId\" => lambda {|n| @tenant_id = n.get_string_value() },\n \"toneInfo\" => lambda {|n| @tone_info = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ToneInfo.create_from_discriminator_value(pn) }) },\n \"transcription\" => lambda {|n| @transcription = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::CallTranscriptionInfo.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return {\n \"externalId\" => lambda {|n| @external_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"teacherNumber\" => lambda {|n| @teacher_number = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"customKeyIdentifier\" => lambda {|n| @custom_key_identifier = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"endDateTime\" => lambda {|n| @end_date_time = n.get_date_time_value() },\n \"key\" => lambda {|n| @key = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"keyId\" => lambda {|n| @key_id = n.get_guid_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"startDateTime\" => lambda {|n| @start_date_time = n.get_date_time_value() },\n \"thumbprint\" => lambda {|n| @thumbprint = n.get_string_value() },\n \"type\" => lambda {|n| @type = n.get_string_value() },\n \"usage\" => lambda {|n| @usage = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"allowMultipleLines\" => lambda {|n| @allow_multiple_lines = n.get_boolean_value() },\n \"appendChangesToExistingText\" => lambda {|n| @append_changes_to_existing_text = n.get_boolean_value() },\n \"linesForEditing\" => lambda {|n| @lines_for_editing = n.get_number_value() },\n \"maxLength\" => lambda {|n| @max_length = n.get_number_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"textType\" => lambda {|n| @text_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"assignCategories\" => lambda {|n| @assign_categories = n.get_collection_of_primitive_values(String) },\n \"copyToFolder\" => lambda {|n| @copy_to_folder = n.get_string_value() },\n \"delete\" => lambda {|n| @delete = n.get_boolean_value() },\n \"forwardAsAttachmentTo\" => lambda {|n| @forward_as_attachment_to = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"forwardTo\" => lambda {|n| @forward_to = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"markAsRead\" => lambda {|n| @mark_as_read = n.get_boolean_value() },\n \"markImportance\" => lambda {|n| @mark_importance = n.get_enum_value(MicrosoftGraph::Models::Importance) },\n \"moveToFolder\" => lambda {|n| @move_to_folder = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"permanentDelete\" => lambda {|n| @permanent_delete = n.get_boolean_value() },\n \"redirectTo\" => lambda {|n| @redirect_to = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"stopProcessingRules\" => lambda {|n| @stop_processing_rules = n.get_boolean_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"acceptMappedClaims\" => lambda {|n| @accept_mapped_claims = n.get_boolean_value() },\n \"knownClientApplications\" => lambda {|n| @known_client_applications = n.get_collection_of_primitive_values(UUIDTools::UUID) },\n \"oauth2PermissionScopes\" => lambda {|n| @oauth2_permission_scopes = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::PermissionScope.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"preAuthorizedApplications\" => lambda {|n| @pre_authorized_applications = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::PreAuthorizedApplication.create_from_discriminator_value(pn) }) },\n \"requestedAccessTokenVersion\" => lambda {|n| @requested_access_token_version = n.get_number_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"content\" => lambda {|n| @content = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"contentUrl\" => lambda {|n| @content_url = n.get_string_value() },\n \"createdByAppId\" => lambda {|n| @created_by_app_id = n.get_string_value() },\n \"lastModifiedDateTime\" => lambda {|n| @last_modified_date_time = n.get_date_time_value() },\n \"level\" => lambda {|n| @level = n.get_number_value() },\n \"links\" => lambda {|n| @links = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::PageLinks.create_from_discriminator_value(pn) }) },\n \"order\" => lambda {|n| @order = n.get_number_value() },\n \"parentNotebook\" => lambda {|n| @parent_notebook = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Notebook.create_from_discriminator_value(pn) }) },\n \"parentSection\" => lambda {|n| @parent_section = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::OnenoteSection.create_from_discriminator_value(pn) }) },\n \"title\" => lambda {|n| @title = n.get_string_value() },\n \"userTags\" => lambda {|n| @user_tags = n.get_collection_of_primitive_values(String) },\n })\n end",
"def get_field_deserializers()\n return {\n \"failedRuns\" => lambda {|n| @failed_runs = n.get_number_value() },\n \"failedTasks\" => lambda {|n| @failed_tasks = n.get_number_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"successfulRuns\" => lambda {|n| @successful_runs = n.get_number_value() },\n \"totalRuns\" => lambda {|n| @total_runs = n.get_number_value() },\n \"totalTasks\" => lambda {|n| @total_tasks = n.get_number_value() },\n \"totalUsers\" => lambda {|n| @total_users = n.get_number_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"id\" => lambda {|n| @id = n.get_guid_value() },\n \"isEnabled\" => lambda {|n| @is_enabled = n.get_boolean_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"value\" => lambda {|n| @value = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"recommendedActions\" => lambda {|n| @recommended_actions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RecommendedAction.create_from_discriminator_value(pn) }) },\n \"resolvedTargetsCount\" => lambda {|n| @resolved_targets_count = n.get_number_value() },\n \"simulationEventsContent\" => lambda {|n| @simulation_events_content = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SimulationEventsContent.create_from_discriminator_value(pn) }) },\n \"trainingEventsContent\" => lambda {|n| @training_events_content = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::TrainingEventsContent.create_from_discriminator_value(pn) }) },\n }\n end",
"def get_field_deserializers()\n return {\n \"customKeyIdentifier\" => lambda {|n| @custom_key_identifier = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"endDateTime\" => lambda {|n| @end_date_time = n.get_date_time_value() },\n \"hint\" => lambda {|n| @hint = n.get_string_value() },\n \"keyId\" => lambda {|n| @key_id = n.get_guid_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"secretText\" => lambda {|n| @secret_text = n.get_string_value() },\n \"startDateTime\" => lambda {|n| @start_date_time = n.get_date_time_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"isRequired\" => lambda {|n| @is_required = n.get_boolean_value() },\n \"locations\" => lambda {|n| @locations = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::LocationConstraintItem.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"suggestLocation\" => lambda {|n| @suggest_location = n.get_boolean_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"activityType\" => lambda {|n| @activity_type = n.get_string_value() },\n \"chainId\" => lambda {|n| @chain_id = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"previewText\" => lambda {|n| @preview_text = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ItemBody.create_from_discriminator_value(pn) }) },\n \"recipient\" => lambda {|n| @recipient = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::TeamworkNotificationRecipient.create_from_discriminator_value(pn) }) },\n \"templateParameters\" => lambda {|n| @template_parameters = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::KeyValuePair.create_from_discriminator_value(pn) }) },\n \"topic\" => lambda {|n| @topic = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::TeamworkActivityTopic.create_from_discriminator_value(pn) }) },\n }\n end",
"def metadata\n self.class.metadata\n end",
"def get_field_deserializers()\n return {\n \"activityIdentifier\" => lambda {|n| @activity_identifier = n.get_string_value() },\n \"countEntitled\" => lambda {|n| @count_entitled = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countEntitledForProvisioning\" => lambda {|n| @count_entitled_for_provisioning = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countEscrowed\" => lambda {|n| @count_escrowed = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countEscrowedRaw\" => lambda {|n| @count_escrowed_raw = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countExported\" => lambda {|n| @count_exported = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countExports\" => lambda {|n| @count_exports = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countImported\" => lambda {|n| @count_imported = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countImportedDeltas\" => lambda {|n| @count_imported_deltas = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"countImportedReferenceDeltas\" => lambda {|n| @count_imported_reference_deltas = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"error\" => lambda {|n| @error = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SynchronizationError.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"state\" => lambda {|n| @state = n.get_enum_value(MicrosoftGraph::Models::SynchronizationTaskExecutionResult) },\n \"timeBegan\" => lambda {|n| @time_began = n.get_date_time_value() },\n \"timeEnded\" => lambda {|n| @time_ended = n.get_date_time_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"content\" => lambda {|n| @content = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"transportKey\" => lambda {|n| @transport_key = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"activeDeviceCount\" => lambda {|n| @active_device_count = n.get_number_value() },\n \"deviceManufacturer\" => lambda {|n| @device_manufacturer = n.get_string_value() },\n \"deviceModel\" => lambda {|n| @device_model = n.get_string_value() },\n \"healthStatus\" => lambda {|n| @health_status = n.get_enum_value(MicrosoftGraph::Models::UserExperienceAnalyticsHealthState) },\n \"meanTimeToFailureInMinutes\" => lambda {|n| @mean_time_to_failure_in_minutes = n.get_number_value() },\n \"modelAppHealthScore\" => lambda {|n| @model_app_health_score = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return {\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"resourceAccess\" => lambda {|n| @resource_access = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ResourceAccess.create_from_discriminator_value(pn) }) },\n \"resourceAppId\" => lambda {|n| @resource_app_id = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"createdDateTime\" => lambda {|n| @created_date_time = n.get_date_time_value() },\n \"deviceId\" => lambda {|n| @device_id = n.get_string_value() },\n \"key\" => lambda {|n| @key = n.get_string_value() },\n \"volumeType\" => lambda {|n| @volume_type = n.get_enum_value(MicrosoftGraph::Models::VolumeType) },\n })\n end",
"def get_field_deserializers()\n return {\n \"anchor\" => lambda {|n| @anchor = n.get_boolean_value() },\n \"apiExpressions\" => lambda {|n| @api_expressions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::StringKeyStringValuePair.create_from_discriminator_value(pn) }) },\n \"caseExact\" => lambda {|n| @case_exact = n.get_boolean_value() },\n \"defaultValue\" => lambda {|n| @default_value = n.get_string_value() },\n \"flowNullValues\" => lambda {|n| @flow_null_values = n.get_boolean_value() },\n \"metadata\" => lambda {|n| @metadata = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::AttributeDefinitionMetadataEntry.create_from_discriminator_value(pn) }) },\n \"multivalued\" => lambda {|n| @multivalued = n.get_boolean_value() },\n \"mutability\" => lambda {|n| @mutability = n.get_enum_value(MicrosoftGraph::Models::Mutability) },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"referencedObjects\" => lambda {|n| @referenced_objects = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ReferencedObject.create_from_discriminator_value(pn) }) },\n \"required\" => lambda {|n| @required = n.get_boolean_value() },\n \"type\" => lambda {|n| @type = n.get_enum_value(MicrosoftGraph::Models::AttributeType) },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"content\" => lambda {|n| @content = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"nextExpectedRanges\" => lambda {|n| @next_expected_ranges = n.get_collection_of_primitive_values(String) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"content\" => lambda {|n| @content = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"size\" => lambda {|n| @size = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"averageBlueScreens\" => lambda {|n| @average_blue_screens = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"averageRestarts\" => lambda {|n| @average_restarts = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"blueScreenCount\" => lambda {|n| @blue_screen_count = n.get_number_value() },\n \"bootScore\" => lambda {|n| @boot_score = n.get_number_value() },\n \"coreBootTimeInMs\" => lambda {|n| @core_boot_time_in_ms = n.get_number_value() },\n \"coreLoginTimeInMs\" => lambda {|n| @core_login_time_in_ms = n.get_number_value() },\n \"deviceCount\" => lambda {|n| @device_count = n.get_object_value(lambda {|pn| Int64.create_from_discriminator_value(pn) }) },\n \"deviceName\" => lambda {|n| @device_name = n.get_string_value() },\n \"diskType\" => lambda {|n| @disk_type = n.get_enum_value(MicrosoftGraph::Models::DiskType) },\n \"groupPolicyBootTimeInMs\" => lambda {|n| @group_policy_boot_time_in_ms = n.get_number_value() },\n \"groupPolicyLoginTimeInMs\" => lambda {|n| @group_policy_login_time_in_ms = n.get_number_value() },\n \"healthStatus\" => lambda {|n| @health_status = n.get_enum_value(MicrosoftGraph::Models::UserExperienceAnalyticsHealthState) },\n \"loginScore\" => lambda {|n| @login_score = n.get_number_value() },\n \"manufacturer\" => lambda {|n| @manufacturer = n.get_string_value() },\n \"model\" => lambda {|n| @model = n.get_string_value() },\n \"modelStartupPerformanceScore\" => lambda {|n| @model_startup_performance_score = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"operatingSystemVersion\" => lambda {|n| @operating_system_version = n.get_string_value() },\n \"responsiveDesktopTimeInMs\" => lambda {|n| @responsive_desktop_time_in_ms = n.get_number_value() },\n \"restartCount\" => lambda {|n| @restart_count = n.get_number_value() },\n \"startupPerformanceScore\" => lambda {|n| @startup_performance_score = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return {\n \"connectingIP\" => lambda {|n| @connecting_i_p = n.get_string_value() },\n \"deliveryAction\" => lambda {|n| @delivery_action = n.get_string_value() },\n \"deliveryLocation\" => lambda {|n| @delivery_location = n.get_string_value() },\n \"directionality\" => lambda {|n| @directionality = n.get_string_value() },\n \"internetMessageId\" => lambda {|n| @internet_message_id = n.get_string_value() },\n \"messageFingerprint\" => lambda {|n| @message_fingerprint = n.get_string_value() },\n \"messageReceivedDateTime\" => lambda {|n| @message_received_date_time = n.get_date_time_value() },\n \"messageSubject\" => lambda {|n| @message_subject = n.get_string_value() },\n \"networkMessageId\" => lambda {|n| @network_message_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"application\" => lambda {|n| @application = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Identity.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"preventsDownload\" => lambda {|n| @prevents_download = n.get_boolean_value() },\n \"scope\" => lambda {|n| @scope = n.get_string_value() },\n \"type\" => lambda {|n| @type = n.get_string_value() },\n \"webHtml\" => lambda {|n| @web_html = n.get_string_value() },\n \"webUrl\" => lambda {|n| @web_url = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"id\" => lambda {|n| @id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"nextExpectedRanges\" => lambda {|n| @next_expected_ranges = n.get_collection_of_primitive_values(String) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"uploadUrl\" => lambda {|n| @upload_url = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"appCrashCount\" => lambda {|n| @app_crash_count = n.get_number_value() },\n \"appDisplayName\" => lambda {|n| @app_display_name = n.get_string_value() },\n \"appName\" => lambda {|n| @app_name = n.get_string_value() },\n \"appPublisher\" => lambda {|n| @app_publisher = n.get_string_value() },\n \"appVersion\" => lambda {|n| @app_version = n.get_string_value() },\n \"deviceCountWithCrashes\" => lambda {|n| @device_count_with_crashes = n.get_number_value() },\n \"isLatestUsedVersion\" => lambda {|n| @is_latest_used_version = n.get_boolean_value() },\n \"isMostUsedVersion\" => lambda {|n| @is_most_used_version = n.get_boolean_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"attributeMappings\" => lambda {|n| @attribute_mappings = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::AttributeMapping.create_from_discriminator_value(pn) }) },\n \"enabled\" => lambda {|n| @enabled = n.get_boolean_value() },\n \"flowTypes\" => lambda {|n| @flow_types = n.get_enum_value(MicrosoftGraph::Models::ObjectFlowTypes) },\n \"metadata\" => lambda {|n| @metadata = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ObjectMappingMetadataEntry.create_from_discriminator_value(pn) }) },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"scope\" => lambda {|n| @scope = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Filter.create_from_discriminator_value(pn) }) },\n \"sourceObjectName\" => lambda {|n| @source_object_name = n.get_string_value() },\n \"targetObjectName\" => lambda {|n| @target_object_name = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"isDefault\" => lambda {|n| @is_default = n.get_boolean_value() },\n \"links\" => lambda {|n| @links = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SectionLinks.create_from_discriminator_value(pn) }) },\n \"pages\" => lambda {|n| @pages = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::OnenotePage.create_from_discriminator_value(pn) }) },\n \"pagesUrl\" => lambda {|n| @pages_url = n.get_string_value() },\n \"parentNotebook\" => lambda {|n| @parent_notebook = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Notebook.create_from_discriminator_value(pn) }) },\n \"parentSectionGroup\" => lambda {|n| @parent_section_group = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SectionGroup.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"appCrashCount\" => lambda {|n| @app_crash_count = n.get_number_value() },\n \"appHangCount\" => lambda {|n| @app_hang_count = n.get_number_value() },\n \"crashedAppCount\" => lambda {|n| @crashed_app_count = n.get_number_value() },\n \"deviceAppHealthScore\" => lambda {|n| @device_app_health_score = n.get_object_value(lambda {|pn| Double.create_from_discriminator_value(pn) }) },\n \"deviceDisplayName\" => lambda {|n| @device_display_name = n.get_string_value() },\n \"deviceId\" => lambda {|n| @device_id = n.get_string_value() },\n \"deviceManufacturer\" => lambda {|n| @device_manufacturer = n.get_string_value() },\n \"deviceModel\" => lambda {|n| @device_model = n.get_string_value() },\n \"healthStatus\" => lambda {|n| @health_status = n.get_enum_value(MicrosoftGraph::Models::UserExperienceAnalyticsHealthState) },\n \"meanTimeToFailureInMinutes\" => lambda {|n| @mean_time_to_failure_in_minutes = n.get_number_value() },\n \"processedDateTime\" => lambda {|n| @processed_date_time = n.get_date_time_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"messageId\" => lambda {|n| @message_id = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"replyChainMessageId\" => lambda {|n| @reply_chain_message_id = n.get_string_value() },\n \"threadId\" => lambda {|n| @thread_id = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"createdDateTime\" => lambda {|n| @created_date_time = n.get_date_time_value() },\n \"isUsable\" => lambda {|n| @is_usable = n.get_boolean_value() },\n \"isUsableOnce\" => lambda {|n| @is_usable_once = n.get_boolean_value() },\n \"lifetimeInMinutes\" => lambda {|n| @lifetime_in_minutes = n.get_number_value() },\n \"methodUsabilityReason\" => lambda {|n| @method_usability_reason = n.get_string_value() },\n \"startDateTime\" => lambda {|n| @start_date_time = n.get_date_time_value() },\n \"temporaryAccessPass\" => lambda {|n| @temporary_access_pass = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"owner\" => lambda {|n| @owner = n.get_string_value() },\n \"properties\" => lambda {|n| @properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ExtensionSchemaProperty.create_from_discriminator_value(pn) }) },\n \"status\" => lambda {|n| @status = n.get_string_value() },\n \"targetTypes\" => lambda {|n| @target_types = n.get_collection_of_primitive_values(String) },\n })\n end",
"def get_field_deserializers()\n return {\n \"bargeInAllowed\" => lambda {|n| @barge_in_allowed = n.get_boolean_value() },\n \"clientContext\" => lambda {|n| @client_context = n.get_string_value() },\n \"initialSilenceTimeoutInSeconds\" => lambda {|n| @initial_silence_timeout_in_seconds = n.get_number_value() },\n \"maxRecordDurationInSeconds\" => lambda {|n| @max_record_duration_in_seconds = n.get_number_value() },\n \"maxSilenceTimeoutInSeconds\" => lambda {|n| @max_silence_timeout_in_seconds = n.get_number_value() },\n \"playBeep\" => lambda {|n| @play_beep = n.get_boolean_value() },\n \"prompts\" => lambda {|n| @prompts = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Prompt.create_from_discriminator_value(pn) }) },\n \"stopTones\" => lambda {|n| @stop_tones = n.get_collection_of_primitive_values(String) },\n }\n end",
"def get_field_deserializers()\n return {\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"response\" => lambda {|n| @response = n.get_enum_value(MicrosoftGraph::Models::ResponseType) },\n \"time\" => lambda {|n| @time = n.get_date_time_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"driveId\" => lambda {|n| @drive_id = n.get_string_value() },\n \"driveType\" => lambda {|n| @drive_type = n.get_string_value() },\n \"id\" => lambda {|n| @id = n.get_string_value() },\n \"name\" => lambda {|n| @name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"path\" => lambda {|n| @path = n.get_string_value() },\n \"shareId\" => lambda {|n| @share_id = n.get_string_value() },\n \"sharepointIds\" => lambda {|n| @sharepoint_ids = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SharepointIds.create_from_discriminator_value(pn) }) },\n \"siteId\" => lambda {|n| @site_id = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"appCrashCount\" => lambda {|n| @app_crash_count = n.get_number_value() },\n \"appDisplayName\" => lambda {|n| @app_display_name = n.get_string_value() },\n \"appName\" => lambda {|n| @app_name = n.get_string_value() },\n \"appPublisher\" => lambda {|n| @app_publisher = n.get_string_value() },\n \"appVersion\" => lambda {|n| @app_version = n.get_string_value() },\n \"deviceDisplayName\" => lambda {|n| @device_display_name = n.get_string_value() },\n \"deviceId\" => lambda {|n| @device_id = n.get_string_value() },\n \"processedDateTime\" => lambda {|n| @processed_date_time = n.get_date_time_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"activeMalwareDetectionCount\" => lambda {|n| @active_malware_detection_count = n.get_number_value() },\n \"category\" => lambda {|n| @category = n.get_enum_value(MicrosoftGraph::Models::WindowsMalwareCategory) },\n \"deviceCount\" => lambda {|n| @device_count = n.get_number_value() },\n \"distinctActiveMalwareCount\" => lambda {|n| @distinct_active_malware_count = n.get_number_value() },\n \"lastUpdateDateTime\" => lambda {|n| @last_update_date_time = n.get_date_time_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"content\" => lambda {|n| @content = n.get_object_value(lambda {|pn| Base64url.create_from_discriminator_value(pn) }) },\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"issuer\" => lambda {|n| @issuer = n.get_string_value() },\n \"issuerName\" => lambda {|n| @issuer_name = n.get_string_value() },\n \"status\" => lambda {|n| @status = n.get_enum_value(MicrosoftGraph::Models::CertificateStatus) },\n \"subject\" => lambda {|n| @subject = n.get_string_value() },\n \"subjectName\" => lambda {|n| @subject_name = n.get_string_value() },\n \"uploadDateTime\" => lambda {|n| @upload_date_time = n.get_date_time_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"appId\" => lambda {|n| @app_id = n.get_string_value() },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"servicePrincipalId\" => lambda {|n| @service_principal_id = n.get_string_value() },\n \"servicePrincipalName\" => lambda {|n| @service_principal_name = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"riskDetections\" => lambda {|n| @risk_detections = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RiskDetection.create_from_discriminator_value(pn) }) },\n \"riskyServicePrincipals\" => lambda {|n| @risky_service_principals = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RiskyServicePrincipal.create_from_discriminator_value(pn) }) },\n \"riskyUsers\" => lambda {|n| @risky_users = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RiskyUser.create_from_discriminator_value(pn) }) },\n \"servicePrincipalRiskDetections\" => lambda {|n| @service_principal_risk_detections = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ServicePrincipalRiskDetection.create_from_discriminator_value(pn) }) },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n })\n end",
"def get_field_deserializers()\n return super.merge({\n })\n end",
"def get_field_deserializers()\n return super.merge({\n })\n end",
"def get_field_deserializers()\n return {\n \"failedTasks\" => lambda {|n| @failed_tasks = n.get_number_value() },\n \"failedUsers\" => lambda {|n| @failed_users = n.get_number_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"successfulUsers\" => lambda {|n| @successful_users = n.get_number_value() },\n \"totalTasks\" => lambda {|n| @total_tasks = n.get_number_value() },\n \"totalUsers\" => lambda {|n| @total_users = n.get_number_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"durationInSeconds\" => lambda {|n| @duration_in_seconds = n.get_number_value() },\n \"joinDateTime\" => lambda {|n| @join_date_time = n.get_date_time_value() },\n \"leaveDateTime\" => lambda {|n| @leave_date_time = n.get_date_time_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"applicationId\" => lambda {|n| @application_id = n.get_string_value() },\n \"changeType\" => lambda {|n| @change_type = n.get_string_value() },\n \"clientState\" => lambda {|n| @client_state = n.get_string_value() },\n \"creatorId\" => lambda {|n| @creator_id = n.get_string_value() },\n \"encryptionCertificate\" => lambda {|n| @encryption_certificate = n.get_string_value() },\n \"encryptionCertificateId\" => lambda {|n| @encryption_certificate_id = n.get_string_value() },\n \"expirationDateTime\" => lambda {|n| @expiration_date_time = n.get_date_time_value() },\n \"includeResourceData\" => lambda {|n| @include_resource_data = n.get_boolean_value() },\n \"latestSupportedTlsVersion\" => lambda {|n| @latest_supported_tls_version = n.get_string_value() },\n \"lifecycleNotificationUrl\" => lambda {|n| @lifecycle_notification_url = n.get_string_value() },\n \"notificationQueryOptions\" => lambda {|n| @notification_query_options = n.get_string_value() },\n \"notificationUrl\" => lambda {|n| @notification_url = n.get_string_value() },\n \"notificationUrlAppId\" => lambda {|n| @notification_url_app_id = n.get_string_value() },\n \"resource\" => lambda {|n| @resource = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"entityType\" => lambda {|n| @entity_type = n.get_string_value() },\n \"mailNickname\" => lambda {|n| @mail_nickname = n.get_string_value() },\n \"onBehalfOfUserId\" => lambda {|n| @on_behalf_of_user_id = n.get_guid_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"actionName\" => lambda {|n| @action_name = n.get_string_value() },\n \"actionState\" => lambda {|n| @action_state = n.get_enum_value(MicrosoftGraph::Models::ActionState) },\n \"lastUpdatedDateTime\" => lambda {|n| @last_updated_date_time = n.get_date_time_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"startDateTime\" => lambda {|n| @start_date_time = n.get_date_time_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"accountName\" => lambda {|n| @account_name = n.get_string_value() },\n \"azureAdUserId\" => lambda {|n| @azure_ad_user_id = n.get_string_value() },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"domainName\" => lambda {|n| @domain_name = n.get_string_value() },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"userPrincipalName\" => lambda {|n| @user_principal_name = n.get_string_value() },\n \"userSid\" => lambda {|n| @user_sid = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return super.merge({\n \"comment\" => lambda {|n| @comment = n.get_string_value() },\n \"createdBy\" => lambda {|n| @created_by = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::IdentitySet.create_from_discriminator_value(pn) }) },\n \"createdDateTime\" => lambda {|n| @created_date_time = n.get_date_time_value() },\n \"items\" => lambda {|n| @items = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::DocumentSetVersionItem.create_from_discriminator_value(pn) }) },\n \"shouldCaptureMinorVersion\" => lambda {|n| @should_capture_minor_version = n.get_boolean_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"resourceId\" => lambda {|n| @resource_id = n.get_string_value() },\n \"uri\" => lambda {|n| @uri = n.get_string_value() },\n }\n end",
"def get_field_deserializers()\n return {\n \"callChainId\" => lambda {|n| @call_chain_id = n.get_guid_value() },\n \"cloudServiceDeploymentEnvironment\" => lambda {|n| @cloud_service_deployment_environment = n.get_string_value() },\n \"cloudServiceDeploymentId\" => lambda {|n| @cloud_service_deployment_id = n.get_string_value() },\n \"cloudServiceInstanceName\" => lambda {|n| @cloud_service_instance_name = n.get_string_value() },\n \"cloudServiceName\" => lambda {|n| @cloud_service_name = n.get_string_value() },\n \"deviceDescription\" => lambda {|n| @device_description = n.get_string_value() },\n \"deviceName\" => lambda {|n| @device_name = n.get_string_value() },\n \"mediaLegId\" => lambda {|n| @media_leg_id = n.get_guid_value() },\n \"mediaQualityList\" => lambda {|n| @media_quality_list = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::TeleconferenceDeviceMediaQuality.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"participantId\" => lambda {|n| @participant_id = n.get_guid_value() },\n }\n end",
"def _before_validation\n serialize_deserialized_values\n super\n end",
"def get_field_deserializers()\n return super.merge({\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"isBuiltIn\" => lambda {|n| @is_built_in = n.get_boolean_value() },\n \"roleAssignments\" => lambda {|n| @role_assignments = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RoleAssignment.create_from_discriminator_value(pn) }) },\n \"rolePermissions\" => lambda {|n| @role_permissions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::RolePermission.create_from_discriminator_value(pn) }) },\n })\n end",
"def get_field_deserializers()\n return super.merge({\n \"firstSeenDateTime\" => lambda {|n| @first_seen_date_time = n.get_date_time_value() },\n \"host\" => lambda {|n| @host = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::SecurityHost.create_from_discriminator_value(pn) }) },\n \"kind\" => lambda {|n| @kind = n.get_string_value() },\n \"lastSeenDateTime\" => lambda {|n| @last_seen_date_time = n.get_date_time_value() },\n \"value\" => lambda {|n| @value = n.get_string_value() },\n })\n end",
"def get_field_deserializers()\n return {\n \"color\" => lambda {|n| @color = n.get_string_value() },\n \"criterion1\" => lambda {|n| @criterion1 = n.get_string_value() },\n \"criterion2\" => lambda {|n| @criterion2 = n.get_string_value() },\n \"dynamicCriteria\" => lambda {|n| @dynamic_criteria = n.get_string_value() },\n \"filterOn\" => lambda {|n| @filter_on = n.get_string_value() },\n \"icon\" => lambda {|n| @icon = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::WorkbookIcon.create_from_discriminator_value(pn) }) },\n \"@odata.type\" => lambda {|n| @odata_type = n.get_string_value() },\n \"operator\" => lambda {|n| @operator = n.get_string_value() },\n \"values\" => lambda {|n| @values = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Json.create_from_discriminator_value(pn) }) },\n }\n end"
] | [
"0.6510734",
"0.63224316",
"0.6322254",
"0.63094735",
"0.62954384",
"0.6238735",
"0.6232461",
"0.62155676",
"0.6200175",
"0.6199403",
"0.6173917",
"0.61733985",
"0.61705345",
"0.61631054",
"0.61620396",
"0.6158031",
"0.6156071",
"0.6142402",
"0.613998",
"0.6138061",
"0.61200523",
"0.6089013",
"0.60869795",
"0.6079146",
"0.60785794",
"0.6070405",
"0.6063533",
"0.60625833",
"0.6061235",
"0.60584134",
"0.6055769",
"0.6051312",
"0.60465735",
"0.6046329",
"0.6031944",
"0.6029311",
"0.6028314",
"0.60255736",
"0.6022033",
"0.60210633",
"0.6009887",
"0.5988654",
"0.59844214",
"0.59793943",
"0.5975247",
"0.5969614",
"0.596824",
"0.5966432",
"0.5965554",
"0.596292",
"0.5951651",
"0.5950895",
"0.59456754",
"0.59448177",
"0.593984",
"0.59362113",
"0.5935833",
"0.59319806",
"0.59312665",
"0.59307545",
"0.5930406",
"0.5926444",
"0.5926136",
"0.59240156",
"0.5922303",
"0.591605",
"0.591336",
"0.5913327",
"0.59130335",
"0.5910617",
"0.5906052",
"0.5906045",
"0.59042066",
"0.5903306",
"0.5902868",
"0.59027255",
"0.5902389",
"0.5902219",
"0.5901496",
"0.58978146",
"0.5891392",
"0.5890228",
"0.5885622",
"0.5885429",
"0.5884738",
"0.5883899",
"0.5883899",
"0.5883899",
"0.58811784",
"0.5878516",
"0.5877111",
"0.5869185",
"0.5844199",
"0.58430207",
"0.58408237",
"0.58383596",
"0.58362466",
"0.5836192",
"0.5835942",
"0.5834559",
"0.583357"
] | 0.0 | -1 |
Gets the hasAttachments property value. Indicates whether the post has at least one attachment. This is a default property. | def has_attachments
return @has_attachments
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def attachments?\n !attachments.empty?\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments=(value)\n @has_attachments = value\n end",
"def has_attachments\n preneed_attachments.present?\n end",
"def has_attachments?\n !(attachments.nil? || attachments.empty? || attachments[0].empty?)\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def has_attachments?\n !attachments.empty?\n end",
"def attached?\n attachments.any?\n end",
"def has_attachment?\n !!attachment_ids && attachment_ids.any?\n end",
"def has_files?\n attachments.count > 0\n end",
"def attached?\n self.attachments.map(&:state).any? { |state| state == \"attached\" || state == \"attaching\" }\n end",
"def attached?\n attachment.present?\n end",
"def has_attachment?(attachment_name)\n !!(self['_attachments'] && self['_attachments'][attachment_name] && !self['_attachments'][attachment_name].empty?)\n end",
"def attachment?\n !!find_attachment\n end",
"def attachments?\n self.primary_attachment.file? || self.secondary_attachment.file?\n end",
"def attachments\n if File.exists?(attachment_dir)\n return Dir.glob(File.join(attachment_dir, '*')).map { |f| Attachment.new(f, intname) }\n else\n false\n end\n end",
"def has_attachment?\n @has_attachment ||=\n mime_parts(\"text/plain\").any? do |type, fn, id, content|\n fn && (type !~ SIGNATURE_ATTACHMENT_TYPE)\n end\n end",
"def ensure_attachment_present\n if attachments.empty?\n false\n else\n errors.add(:base, 'Attachment needed')\n true\n end\n end",
"def attached?\n @options[:attached]\n end",
"def isAttached\n @attached\n end",
"def attachments\n mail&.attachments || []\n end",
"def attachment?\n attachment.present? && attachment.readable?\n end",
"def attachments\n @attachments ||= begin\n return message.attachments unless message.attachments.empty?\n if full_text_part.nil? && full_html_part.nil?\n [ message ]\n else\n []\n end\n end\n end",
"def attached?\n !!file\n end",
"def attachments\n process_message_body if !@attachments\n @attachments\n end",
"def attachments\n return @attachments\n end",
"def attachments\n return @attachments\n end",
"def attachments\n return @attachments\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments_metadata\n @attachments_metadata || {}\n end",
"def attachments\n @attachments\n end",
"def attachments\n @attachments ||= []\n end",
"def attachments\n @_message.attachments\n end",
"def attachments\n parts.attachments\n end",
"def contains_document?\n result = false\n sample_attachments.map{|sa| result = true if sa.is_document?}\n result\n end",
"def remember_attachments? #:doc:\n @remember_attachments ? true : false\n end",
"def attachment(key)\n # Use to_a.find to take advantage of the eager-loaded attachments and blobs.\n attachments.to_a.find { |a| a.key == key && a.file&.attached? }\n end",
"def attachments\n Easybill::Api::Attachments\n end",
"def multipart?\n message.multipart?\n end",
"def has_attachments(name, options = {})\n define_attachment_association :has_many, name, options\n end",
"def get_attach_size\n\n return 0 if self.mail_attachments.nil? or self.mail_attachments.empty?\n\n sum = 0\n\n self.mail_attachments.each do |attach|\n sum += attach.size\n end\n\n return sum\n end",
"def multipart?\n @multipart\n end",
"def multipart?\n @multipart\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def draft?\n data[\"draft\"] ||= relative_path.index(collection.relative_directory).nil? &&\n collection.label == \"posts\"\n end",
"def attached?\n not self.info.nil?\n end",
"def isAnnouncementPost\r\n\t\t\t\t\treturn @isAnnouncementPost\r\n\t\t\t\tend",
"def has_reported_photo?\n return (Photo.where(\"report_count > 0\").size > 0)\n end",
"def save_attachment?\n @save_attachment\n end",
"def has_photos?\n photos.length > 0\n end",
"def contains_image?\n result = false\n sample_attachments.map{|sa| result = true if sa.is_image?}\n result\n end",
"def has_images?\n Post.joins(:images).where('posts.id = ?', id).count > 0\n end",
"def poster?\n !read_attribute('poster').nil?\n end",
"def can_attach?(additional_size)\n\n return (additional_size <= (THETIS_MAIL_SEND_ATTACHMENT_MAX_KB*1024 - self.get_attach_size))\n end",
"def get_attachments_count\n begin\n \n if @filename == ''\n raise 'filename not specified'\n end\n \n \n str_uri = $product_uri + '/pdf/' + @filename + '/attachments'\n str_signed_uri = Aspose::Cloud::Common::Utils.sign(str_uri) \n\n response_stream = RestClient.get(str_signed_uri, {:accept=>'application/json'})\n \n stream_hash = JSON.parse(response_stream)\n \n return stream_hash['Attachments']['List'].length\n \n \n rescue Exception=>e\n print e\n end\n end",
"def attachments\n if attachment_ids\n attachment_ids.map {|id| Attachment.find(id)}\n else\n []\n end\n end",
"def show\n @post_attachments = @post.post_attachments.all\n end",
"def show\n @post_attachments = @post.post_attachments.all\n end",
"def detached?\n !attached?\n end",
"def attachments_permission\n @attributes[:attachments_permission]\n end",
"def attachments_permission\n @attributes[:attachments_permission]\n end",
"def multipart?\n false\n end",
"def detached?\n !self.attached?\n end",
"def posted?\n post.present?\n end",
"def is_file_activity_notification_enabled\n return @is_file_activity_notification_enabled\n end",
"def attachments=(value)\n @attachments = Array.new\n unless value.nil? || value.empty?\n value.each do |v1|\n if v1.instance_of? AttachmentJson\n @attachments.push(v1)\n end\n end\n end\n end",
"def attachments\n (msg['attachments']||{}).map{|attached| Mandrill::WebHook::Attachment[attached.last] }\n end",
"def multipart?\n true unless parts.empty?\n end",
"def wp_attachment_is_image(post = nil)\n return false if post.nil?\n Rails.cache.fetch('Railspress::' + 'Post.' + 'wp_attachment_is_image' + '/' + (((post.is_a?(Integer) || post.is_a?(String))) ? post : post.id).to_s ) {\n wp_attachment_is('image', post)\n }\n end",
"def wp_attachment_is(type, post = nil)\n post = get_post(post)\n return false if post.nil?\n\n return false if (!file = get_attached_file(post.id))\n\n return true if post.post_mime_type.index(type + '/') == 0\n\n check = wp_check_filetype(file)\n return false if check[:ext].blank?\n\n ext = check[:ext]\n\n if 'import' != post.post_mime_type\n return type == ext\n end\n\n case type\n when 'image'\n ['jpg', 'jpeg', 'jpe', 'gif', 'png'].include? ext\n when 'audio'\n wp_get_audio_extensions.include? ext\n when 'video'\n wp_get_video_extensions.include? ext\n else\n type == ext\n end\n end",
"def has_attachment(name, options = {})\n define_attachment_association :has_one, name, options\n end",
"def media_attachment_count\n self.case_media_attachments.try(:count) <= 10\n end",
"def attachments_for_export\n []\n end",
"def no_attachment_errors\n unless attachment.errors.empty?\n # uncomment this to get rid of the less-than-useful interim messages\n # errors.clear\n errors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n false\n end\n end",
"def is_final?\n attachment&.is_final?\n end",
"def no_attachment_errors\n unless attachment.errors.empty?\n # uncomment this to get rid of the less-than-useful interrim messages\n # errors.clear\n errors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n false\n end\n end",
"def files_embargoed\n return self['files_embargoed_bsi'] unless self['files_embargoed_bsi'].nil?\n return self['files_embargoed_tesim'].first.to_s == \"true\" if self['files_embargoed_tesim']\n false\n end",
"def has_submissions?\n submitted_files.any? || submitted_hyperlinks.present?\n end",
"def deliver_emails?\n if self.default_options[:deliver_emails].nil?\n true\n else\n self.default_options[:deliver_emails]\n end\n end",
"def has_assets?\n assets.none?\n end",
"def number_reported_photo?\n return Photo.where(\"report_count > 0\").size\n end",
"def attachment?(file_name)\n frm.link(:text=>file_name).exist?\n end",
"def get_attachment_leaves\n return _get_attachment_leaves_recursive(self.mail)\n end",
"def attached?\n not identifier.blank?\n end",
"def volume_group_attached?(vm_id, vol_group_uuid)\n vol_group = @vol_group_manager.get_volume_group(vol_group_uuid)\n return false unless vol_group.has_key?('attachment_list')\n vol_group['attachment_list'].each do |vm|\n return true if vm['vm_uuid'] == vm_id\n end\n false\n rescue => e\n raise e\n end",
"def attached?\n xml || model \n end",
"def exist_posts\n @exist_posts ||= FbPost.all.map {|a| a.post_id}\n end",
"def has_photo?\n send('file_uploader_url').present?\n end",
"def already_notified_for_attached_by_notifier?\n user.notifications.where(attached: attached, notifier: notifier).exists?\n end",
"def published?\n self.targets.map { |tgt| File.exist?(File.join(\n self.publish_path, self.to_s, tgt)) }.all?\n end",
"def pages_count_column?\n expected_column = default_pages_count_column\n @attachment.instance.class.columns.detect do |column|\n column.name.to_s == expected_column\n end\n end"
] | [
"0.7489739",
"0.74411935",
"0.74411935",
"0.74411935",
"0.74411935",
"0.74204534",
"0.7360084",
"0.73251975",
"0.73251975",
"0.7294886",
"0.7228975",
"0.70737815",
"0.69212735",
"0.686036",
"0.6769941",
"0.65512073",
"0.6496719",
"0.64298147",
"0.63883096",
"0.6359371",
"0.61654764",
"0.61552685",
"0.60865694",
"0.60827196",
"0.60096896",
"0.5903369",
"0.5868015",
"0.58521837",
"0.57588005",
"0.57588005",
"0.57588005",
"0.5719515",
"0.5719515",
"0.5719515",
"0.5716519",
"0.5678897",
"0.5658945",
"0.56452596",
"0.5521854",
"0.55155605",
"0.55067146",
"0.5471983",
"0.545802",
"0.54303974",
"0.5419527",
"0.5381189",
"0.535156",
"0.535156",
"0.53291297",
"0.5309029",
"0.5289034",
"0.5262983",
"0.52483606",
"0.5229613",
"0.52231467",
"0.52161485",
"0.5163861",
"0.5153657",
"0.5150509",
"0.51403654",
"0.5127616",
"0.51245785",
"0.5088749",
"0.5088749",
"0.5073074",
"0.5060552",
"0.5060552",
"0.50454134",
"0.50352097",
"0.5020393",
"0.500972",
"0.50077105",
"0.49996606",
"0.49958846",
"0.49958658",
"0.49924618",
"0.49690312",
"0.49606213",
"0.49467963",
"0.49383333",
"0.49306774",
"0.49023598",
"0.4901075",
"0.48985937",
"0.48978198",
"0.48902744",
"0.48800397",
"0.48638076",
"0.4845191",
"0.48444492",
"0.4830949",
"0.48178115",
"0.4815792",
"0.48139477",
"0.4810593",
"0.4807056",
"0.4804751"
] | 0.80477417 | 3 |
Sets the hasAttachments property value. Indicates whether the post has at least one attachment. This is a default property. | def has_attachments=(value)
@has_attachments = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n return @has_attachments\n end",
"def has_attachments\n preneed_attachments.present?\n end",
"def has_attachments?\n !(attachments.nil? || attachments.empty? || attachments[0].empty?)\n end",
"def attachments?\n !attachments.empty?\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def attachments?\n self.attachments.size > 0\n end",
"def has_attachments?\n !attachments.empty?\n end",
"def has_attachment?\n !!attachment_ids && attachment_ids.any?\n end",
"def has_files?\n attachments.count > 0\n end",
"def attached?\n attachments.any?\n end",
"def ensure_attachment_present\n if attachments.empty?\n false\n else\n errors.add(:base, 'Attachment needed')\n true\n end\n end",
"def has_attachments(name, options = {})\n define_attachment_association :has_many, name, options\n end",
"def has_attachment?(attachment_name)\n !!(self['_attachments'] && self['_attachments'][attachment_name] && !self['_attachments'][attachment_name].empty?)\n end",
"def attached?\n self.attachments.map(&:state).any? { |state| state == \"attached\" || state == \"attaching\" }\n end",
"def attached?\n attachment.present?\n end",
"def attachments?\n self.primary_attachment.file? || self.secondary_attachment.file?\n end",
"def attachments=(value)\n @attachments = Array.new\n unless value.nil? || value.empty?\n value.each do |v1|\n if v1.instance_of? AttachmentJson\n @attachments.push(v1)\n end\n end\n end\n end",
"def has_attachment?\n @has_attachment ||=\n mime_parts(\"text/plain\").any? do |type, fn, id, content|\n fn && (type !~ SIGNATURE_ATTACHMENT_TYPE)\n end\n end",
"def attachment?\n !!find_attachment\n end",
"def attached?\n @options[:attached]\n end",
"def has_attachment(name, options = {})\n define_attachment_association :has_one, name, options\n end",
"def attachments\n process_message_body if !@attachments\n @attachments\n end",
"def set_AttachmentsWithData(value)\n set_input(\"AttachmentsWithData\", value)\n end",
"def attachments\n if File.exists?(attachment_dir)\n return Dir.glob(File.join(attachment_dir, '*')).map { |f| Attachment.new(f, intname) }\n else\n false\n end\n end",
"def isAttached\n @attached\n end",
"def remember_attachments? #:doc:\n @remember_attachments ? true : false\n end",
"def update!(**args)\n @add_attachments = args[:add_attachments] if args.key?(:add_attachments)\n end",
"def attached?\n !!file\n end",
"def attachments=(attachments)\n attachments.each do |attachment|\n assets.build(attachment)\n end\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n @attachments ||= Attachments.new( self )\n end",
"def attachments\n mail&.attachments || []\n end",
"def update_has_attrs\n\t\tunless promotional == true\n\t\t\tself.has_video = (video_album.video_files.size > 0) ? true : false\n\t\t\tself.has_audio = (audio_album.audio_files.size > 0) ? true : false\n\t\t\tself.has_photos = (photo_album.square_photos.size > 0) ? true : false\n\t\t\tself.has_links = (page_links.count > 0 or created_page_link_this_time) ? true : false\n\t\tend\n\t\ttrue\n\tend",
"def update!(**args)\n @attachments = args[:attachments] if args.key?(:attachments)\n end",
"def update!(**args)\n @attachments = args[:attachments] if args.key?(:attachments)\n end",
"def attachments\n @attachments ||= []\n end",
"def attachment?\n attachment.present? && attachment.readable?\n end",
"def uploaded_attachments=(attachments)\n\t\tattachments.each do |attach|\n\t\t\tif attach && attach.size > 0\n\t\t\t\tattachment = Clip.new\n\t\t\t\tattachment.uploaded_data = attach\n\t\t\t\tself.clips << attachment\n\t\t\tend\n\t\tend\n\tend",
"def attachments\n @attachments ||= begin\n return message.attachments unless message.attachments.empty?\n if full_text_part.nil? && full_html_part.nil?\n [ message ]\n else\n []\n end\n end\n end",
"def save_attachment?\n @save_attachment\n end",
"def attachments_metadata\n @attachments_metadata || {}\n end",
"def setIsAnnouncementPost(isAnnouncementPost)\r\n\t\t\t\t\t@isAnnouncementPost = isAnnouncementPost\r\n\t\t\t\tend",
"def can_attach?(additional_size)\n\n return (additional_size <= (THETIS_MAIL_SEND_ATTACHMENT_MAX_KB*1024 - self.get_attach_size))\n end",
"def no_attachment_errors\n unless attachment.errors.empty?\n # uncomment this to get rid of the less-than-useful interim messages\n # errors.clear\n errors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n false\n end\n end",
"def no_attachment_errors\n unless attachment.errors.empty?\n # uncomment this to get rid of the less-than-useful interrim messages\n # errors.clear\n errors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n false\n end\n end",
"def set_post_attachment\n @post_attachment = PostAttachment.find(params[:id])\n end",
"def attachment(key)\n # Use to_a.find to take advantage of the eager-loaded attachments and blobs.\n attachments.to_a.find { |a| a.key == key && a.file&.attached? }\n end",
"def permitted_attachments_params\n params.permit(attachments: [])\n end",
"def microsoft_defender_for_endpoint_attach_enabled=(value)\n @microsoft_defender_for_endpoint_attach_enabled = value\n end",
"def new_file_attachment_attributes=(file_attachment_attributes)\n file_attachment_attributes.each do |attributes|\n file_attachments.build(attributes) unless attributes[\"attachment\"].blank?\n end\n end",
"def save_attachments attachments\n Attachment.add(attachments, self)\n end",
"def no_attachment_errors\n\t\tunless attachment.errors.empty?\n\t\t\t# uncomment this to get rid of the less-than-useful interim messages\n\t\t\t# errors.clear\n\t\t\terrors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n\t\t\tfalse\n\t\tend\n\tend",
"def show\n @post_attachments = @post.post_attachments.all\n end",
"def show\n @post_attachments = @post.post_attachments.all\n end",
"def loadAttachments( )\n\t\t\tif @filename\n\t\t\t\tattachmentsFile = File.join( File.dirname( @filename ), \"attachments.yaml\" )\n\t\t\t\tif File.file?( attachmentsFile )\n\t\t\t\t\t@attachments = YAML.load_file( attachmentsFile ) \n\t\t\t\tend \n\t\t\tend\n\t\tend",
"def contains_document?\n result = false\n sample_attachments.map{|sa| result = true if sa.is_document?}\n result\n end",
"def attachments\n @_message.attachments\n end",
"def volume_group_attached?(vm_id, vol_group_uuid)\n vol_group = @vol_group_manager.get_volume_group(vol_group_uuid)\n return false unless vol_group.has_key?('attachment_list')\n vol_group['attachment_list'].each do |vm|\n return true if vm['vm_uuid'] == vm_id\n end\n false\n rescue => e\n raise e\n end",
"def add_attachments(value)\n if value.instance_of? AttachmentJson\n @attachments.push(value)\n end\n end",
"def attached?\n not self.info.nil?\n end",
"def no_attachment_errors\n unless attachment.errors.empty? and !attachment_file_name.blank?\n # uncomment this to get rid of the less-than-useful interrim messages\n # errors.clear\n errors.add :attachment, \"Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file.\"\n false\n end\n end",
"def update\n respond_to do |format|\n if @post.update(post_params)\n @user.remove_attach! if params[:post][:remove_attach] == 1\n format.html { redirect_to @post, notice: 'Post was successfully updated.' }\n format.json { render :show, status: :ok, location: @post }\n else\n format.html { render :edit }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end",
"def has_photos?\n photos.length > 0\n end",
"def add_files?\n false\n end",
"def attach(attachment)\n if attachment_ids\n self.attachment_ids |= [attachment.id] if attachment_ids.size < MAX_ATTACHMENTS\n else\n self.attachment_ids = [attachment.id]\n end\n self.joined_attachments << attachment\n end",
"def has_attachment(name)\n id_field_getter_name = \"#{name}_id\".to_sym\n id_field_setter_name = \"#{id_field_getter_name}=\".to_sym\n\n field id_field_getter_name, :type => BSON::ObjectID\n\n define_method(\"#{name}=\".to_sym) do |value|\n unmemoize(name)\n\n # Remove any existing file from the grid.\n old_id = send(id_field_getter_name)\n unless old_id.nil?\n self.class.grid.delete(old_id)\n end\n\n # Store the new file on the grid, or set our ID to nil if we have\n # no file.\n if value.nil?\n send(id_field_setter_name, nil)\n else\n id = self.class.put_on_grid(value)\n send(id_field_setter_name, id)\n end\n end\n\n define_method(name) do\n memoized(name) do\n id = send(id_field_getter_name)\n id.nil? ? nil : self.class.grid.get(id)\n end\n end\n\n # This makes sure we get unmemoized on reloads, and helps us with\n # recursive destroy.\n opts = Mongoid::Associations::Options.new(:name => name,\n :foreign_key =>\n id_field_getter_name)\n associations[name] =\n Mongoid::Associations::MetaData.new(MongoidAttachment, opts)\n end",
"def multipart?\n false\n end",
"def set_travel_post_attachment\n @travel_post_attachment = TravelPostAttachment.find(params[:id])\n end",
"def media_attachment_count\n self.case_media_attachments.try(:count) <= 10\n end",
"def create\n @rent_post = RentPost.new(rent_post_params)\n # @post_attachment = @rent_post.post_attachments.build\n\n respond_to do |format|\n if @rent_post.save\n if params[:post_attachments]!=nil\n params[:post_attachments]['image'].each do |a|\n @post_attachment = @rent_post.post_attachments.create!(:image => a, :rent_post_id => @rent_post.id)\n end\n else\n @post_attachment = @rent_post.post_attachments.create!(:image => nil, :rent_post_id => @rent_post.id)\n end\n format.html { redirect_to @rent_post, notice: 'Rent post was successfully created.' }\n format.json { render :show, status: :created, location: @rent_post }\n else\n format.html { render :new }\n format.json { render json: @rent_post.errors, status: :unprocessable_entity }\n end\n end\n end",
"def save_attachments(params)\n params[:post_attachment]['image'].each do |photo|\n if self.post_attachments(:reload).count < 10\n self.post_attachments.create!(:image => photo)\n else\n # note that a change, market postings can have 10, regular postings have 3\n errors.add(:base, \"Maximum 10 Pictures\")\n end\n end\n end",
"def update_attachment_references\n return if attachment_references.empty?\n\n ids = attachment_reference_ids_removed\n attachment_references.each do |attachment_reference|\n attachment_reference.mark_for_destruction if ids.include?(attachment_reference.id)\n end\n end",
"def attachments\n Easybill::Api::Attachments\n end",
"def is_file_activity_notification_enabled=(value)\n @is_file_activity_notification_enabled = value\n end",
"def attachment_reference_changed?\n !!@attachment_changed\n end",
"def attachment_reference_changed?\n !!@attachment_changed\n end",
"def deliver_emails?\n if self.default_options[:deliver_emails].nil?\n true\n else\n self.default_options[:deliver_emails]\n end\n end",
"def attachments\n @attachments\n end",
"def attachments\n parts.attachments\n end",
"def draft?\n data[\"draft\"] ||= relative_path.index(collection.relative_directory).nil? &&\n collection.label == \"posts\"\n end",
"def post_params\n params.require(:post).permit(:title, post_attachments_attributes: [:id, :post_id, :avatar])\n end",
"def multipart?\n @multipart\n end",
"def multipart?\n @multipart\n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def attachments=(atts)\n atts.each do |attachment|\n if attachment[:id].blank?\n assets.build(attachment)\n else\n asset = assets.detect { |a| a.id == attachment[:id].to_i }\n end\n end \n end",
"def add_file_attachment(new_attachments)\n attachments = @task.attachments\n attachments += new_attachments\n @task.attachments = attachments\n end",
"def published?\n self.targets.map { |tgt| File.exist?(File.join(\n self.publish_path, self.to_s, tgt)) }.all?\n end",
"def multipart?\n message.multipart?\n end"
] | [
"0.6699245",
"0.6699245",
"0.6699245",
"0.6699245",
"0.6426565",
"0.63194615",
"0.62844735",
"0.622033",
"0.622033",
"0.622033",
"0.61577463",
"0.61577463",
"0.6126364",
"0.6028388",
"0.5874492",
"0.5825956",
"0.5799817",
"0.57327735",
"0.5727103",
"0.5637154",
"0.5509644",
"0.5416474",
"0.53650975",
"0.53201616",
"0.52345616",
"0.5223255",
"0.51816034",
"0.51271135",
"0.51161784",
"0.50767726",
"0.50418663",
"0.4994518",
"0.49359217",
"0.49313918",
"0.49165666",
"0.48337638",
"0.48337638",
"0.48210055",
"0.48204276",
"0.4816181",
"0.47484052",
"0.47369006",
"0.47369006",
"0.47174332",
"0.4713823",
"0.47082677",
"0.46985602",
"0.46913555",
"0.468589",
"0.4668581",
"0.4663165",
"0.46091405",
"0.458355",
"0.45800138",
"0.45770946",
"0.45068616",
"0.45024708",
"0.44531998",
"0.44416788",
"0.44300961",
"0.44195136",
"0.44195136",
"0.44059077",
"0.43947613",
"0.43945587",
"0.43917358",
"0.43895417",
"0.4376911",
"0.43749836",
"0.43728665",
"0.43319997",
"0.4324381",
"0.43139327",
"0.43052384",
"0.43014196",
"0.42911637",
"0.4272738",
"0.42665973",
"0.4263861",
"0.42611",
"0.4259289",
"0.42481092",
"0.42456454",
"0.42456454",
"0.42443195",
"0.42296168",
"0.422496",
"0.42080176",
"0.41941947",
"0.419399",
"0.419399",
"0.4193797",
"0.4193797",
"0.4193797",
"0.41931388",
"0.41914007",
"0.41873366"
] | 0.78124654 | 3 |
Gets the inReplyTo property value. Readonly. Supports $expand. | def in_reply_to
return @in_reply_to
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def in_reply_to\n headers['In-Reply-To']\n end",
"def reply_to\n address_list_fetch('reply-to')\n end",
"def in_reply_to=(value)\n @in_reply_to = value\n end",
"def get_reply_to\n @reply_to\n end",
"def set_InReplyTo(value)\n set_input(\"InReplyTo\", value)\n end",
"def reply_to\n return @reply_to\n end",
"def reply_to\n address_impl = @message_impl.getReplyTo\n # only return an address if a reply to was specified\n Qpid::Messaging::Address.new(nil, address_impl) if address_impl\n end",
"def reply_address\n @reply_address ||= ReplyAddress.decode(recipient_param)\n end",
"def reply_to\n (@reply_to || self.from)\n end",
"def reply_to\n @reply_to\n end",
"def inbox\n return @defaults.objectForKey(:inbox)\n end",
"def get_reply\n @reply\n end",
"def reply_to=(value)\n @reply_to = value\n end",
"def reply_to(value = nil)\n if value.nil?\n @reply_to\n else\n @reply_to = value\n end\n end",
"def replying_to\n return nil unless self.reply?\n user = self.text[0...self.text.index(\" \")]\n return nil unless user[0...1] == \"@\"\n user\n end",
"def replying_to\n return nil unless self.reply?\n user = self.text[0...self.text.index(\" \")]\n return nil unless user[0...1] == \"@\"\n user\n end",
"def in_reply_to \n self.parent\n end",
"def get_message\n @reply['message']\n end",
"def user_ref\n @messaging['optin']['user_ref']\n end",
"def inbox_id\n @attributes[:inbox_id]\n end",
"def including_replies \n\t\tif content[0] == \"@\"\n\t\t\tat_to_user = content.split(\".\")[0]\n\t\t\tid_number = at_to_user[1, at_to_user.length-1]\n\t\tend\n\tend",
"def reply?\n !self.in_reply_to.nil?\n end",
"def reply?\n !self.in_reply_to.nil?\n end",
"def automatic_replies_setting\n return @automatic_replies_setting\n end",
"def recipient\n return @recipient\n end",
"def reply\n if acknowledged?\n replies.first\n else\n nil\n end\n end",
"def in_reply_to_user\n previous_tweet.try(:author).try(:screen_name) || params[:in_reply_to_user]\n end",
"def reply_to=( val )\n header[:reply_to] = val\n end",
"def ref\n @messaging['optin']['ref']\n end",
"def recipient\n return @recipient\n end",
"def current_user\n @current_user ||= reply_address.user\n end",
"def reply_chain_message_id\n return @reply_chain_message_id\n end",
"def reply_text\n root['ReplyText']\n end",
"def recipient_email\n return @recipient_email\n end",
"def recipient_email\n return @recipient_email\n end",
"def recipient_contains\n return @recipient_contains\n end",
"def to\n address_list_fetch('to')\n end",
"def get_address\n return @m_address\n end",
"def parent_reply\n Reply.find_by_id(self.parent_id)\n end",
"def reply_to( val = nil )\n default :reply_to, val\n end",
"def sender_address\n @sender_address || ''\n end",
"def phone_number\n\t\t\trecipient.phone_number\n\t\tend",
"def to\n @to ||= unquoted_address_header(:to)\n end",
"def reply?\n !!in_reply_to_status_id\n end",
"def outgoing_value\n @attrs[\"outgoing\"]\n end",
"def is_message?\n in_reply_to && self.private\n end",
"def reply\n @reply\n end",
"def authored_replies\n Reply.find_by_user_id(self.id)\n end",
"def reply_to\n sent_to_channel? ? dest : nick\n end",
"def from\n @property[:from]\n end",
"def readable_inspect\n\t\tto_s(:rfc822)\n\tend",
"def has_inbox\n @attributes[:has_inbox]\n end",
"def inbound_address\n [\"#{slug}-in\", inbound_host].join('@')\n end",
"def user\n if scope.id == object.sender_user_id\n object.recipient_user\n else\n object.sender_user\n end\n end",
"def message_id\n @mail.message_id\n end",
"def originator\n @originator ||= original_message.sender\n end",
"def unread_by?(user)\n received_message.try(:unread?) || replies.map { |r|\n r.received_message if r.recipient == user\n }.compact.any?(&:unread?)\n end",
"def unread_messages\n @attributes[\"unread_messages\"]\n end",
"def originator\n @originator = self.original_message.sender if @originator.nil?\n return @originator\n end",
"def message_id\n self['message-id']\n end",
"def get_from_exp\n\n return nil if self.from_address.nil?\n\n from_exp = \"<#{self.from_address}>\"\n\n unless self.from_name.nil? or self.from_name.empty?\n from_exp = \"#{self.from_name} #{from_exp}\"\n end\n\n return from_exp\n end",
"def get_address\n @address\n end",
"def reply_to(data=nil)\n set_get __method__, data\n end",
"def sender_message\n return @sender_message\n end",
"def current_invoice_address\n return invoice_address if chain.blank?\n return chain.invoice_address if self.TypeOfChain == 1\n invoice_address\n end",
"def recipient(current_user)\n \tself.sender_id == current_user.id ? self.receiver : self.sender\n \tend",
"def message_uri\n return @message_uri\n end",
"def set_ReplyEmail(value)\n set_input(\"ReplyEmail\", value)\n end",
"def post\n post = Post.get_from_cache(reply_to)\n post || Post.first(:id => reply_to)\n end",
"def user_id\n @message[:user][:id]\n end",
"def get_email_recipient\n user == chat.sender ? chat.recipient : chat.sender\n end",
"def email_address\n \"CoursAvenue <#{token}@#{CoursAvenue::Application::MANDRILL_REPLY_TO_DOMAIN}>\"\n end",
"def reply_to=(address)\n address = Qpid::Messaging::Address.new \"#{address}\" if !address.is_a? Qpid::Messaging::Address\n\n @message_impl.setReplyTo address.address_impl\n end",
"def replies\n messages.joins(:reply_relationships).distinct\n end",
"def email_address\n raw_info['email_addresses'] && raw_info['email_addresses'].first\n end",
"def notification_recipient_scope\n return @notification_recipient_scope\n end",
"def authored_replies\n Replies.find_by_user_id(@id)\n end",
"def remote_ip\n return self[\"client-ip\"] || @forwarded_for || @peeraddr[3]\n end",
"def pinned_messages\n return @pinned_messages\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def email_inboxes\n @attributes[:email_inboxes]\n end",
"def ipaddress\n @attributes.fetch('ipaddress', nil)\n end",
"def recipients_from_delivered_to\n @recipients_from_delivered_to ||= unquoted_address_header('delivered-to')\n end",
"def comment_reply\n CommentMailer.comment_reply(Comment.where('parent_id is not null').last.id)\n end",
"def recipient\n return self.unless_nil?('invitee.email', deliveries.unless_nil?('first.recipients', '') )\n end",
"def get_unread(user)\n list = []\n if unread?(user)\n list.push(self)\n end\n public_replies(user).unread_by(user).each do |reply|\n list.push(reply)\n end\n list\n end",
"def is_reply?\n\t\tthis.is_reply\n\tend",
"def message_id\n return @message_id\n end",
"def current_recipient_child\n (status == :accept_recip_child || status == :accepted_full ) ? requester : recipient\n end",
"def set_inquiry_reply\n @inquiry_reply = InquiryReply.find(params[:id])\n end",
"def message_id\n @message_id\n end",
"def recipient_domain\n self.rcpt_to ? self.rcpt_to.split('@').last : nil\n end",
"def recipient_domain\n self.rcpt_to ? self.rcpt_to.split('@').last : nil\n end",
"def get_note\n note = queued_email_note\n note ? note.value.to_s : nil\n end",
"def address\n return @address\n end",
"def address\n return @address\n end",
"def enable_inbox_replies\n client.post('/api/sendreplies', id: read_attribute(:name), state: true)\n end",
"def email_address\n @data['emailAddress']\n end",
"def inbox_code\n @attributes[:inbox_code]\n end",
"def address\n @address ||= Mail::Address.new(self.to_s) rescue nil\n end"
] | [
"0.69207305",
"0.6692837",
"0.6652508",
"0.6604852",
"0.6496073",
"0.63015074",
"0.62397194",
"0.6159394",
"0.60975665",
"0.59925383",
"0.5813927",
"0.5728539",
"0.56737214",
"0.560972",
"0.55890465",
"0.55890465",
"0.55776894",
"0.5448267",
"0.5428158",
"0.539586",
"0.53702706",
"0.5316445",
"0.5316445",
"0.53102946",
"0.52985597",
"0.5279633",
"0.52758",
"0.5259247",
"0.5229995",
"0.5226047",
"0.52082276",
"0.5181884",
"0.5173577",
"0.51585704",
"0.51585704",
"0.51167756",
"0.5086096",
"0.504581",
"0.50107396",
"0.50077075",
"0.49613437",
"0.49420097",
"0.49392608",
"0.4905658",
"0.48858505",
"0.48814347",
"0.48651072",
"0.48647147",
"0.48646852",
"0.47901106",
"0.4785528",
"0.4781367",
"0.47569934",
"0.4751184",
"0.47458524",
"0.47211254",
"0.4708412",
"0.4707795",
"0.4706609",
"0.4704943",
"0.4704774",
"0.4684318",
"0.46831208",
"0.46741706",
"0.46702307",
"0.46691585",
"0.46690053",
"0.46682206",
"0.46571335",
"0.46435067",
"0.4639211",
"0.46388823",
"0.4637483",
"0.4635483",
"0.46269953",
"0.462539",
"0.4624139",
"0.4615193",
"0.4612349",
"0.46117955",
"0.46107274",
"0.46074727",
"0.4597409",
"0.45887822",
"0.45875126",
"0.45751196",
"0.4571878",
"0.4556861",
"0.45551464",
"0.45527864",
"0.45520267",
"0.45493048",
"0.45493048",
"0.45480266",
"0.45445627",
"0.45445627",
"0.45402575",
"0.45397112",
"0.45317945",
"0.45277256"
] | 0.72811353 | 0 |
Sets the inReplyTo property value. Readonly. Supports $expand. | def in_reply_to=(value)
@in_reply_to = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_InReplyTo(value)\n set_input(\"InReplyTo\", value)\n end",
"def reply_to=(value)\n @reply_to = value\n end",
"def reply_to=( val )\n header[:reply_to] = val\n end",
"def reply_to=(address)\n address = Qpid::Messaging::Address.new \"#{address}\" if !address.is_a? Qpid::Messaging::Address\n\n @message_impl.setReplyTo address.address_impl\n end",
"def reply_to(value = nil)\n if value.nil?\n @reply_to\n else\n @reply_to = value\n end\n end",
"def set_ReplyEmail(value)\n set_input(\"ReplyEmail\", value)\n end",
"def in_reply_to\n return @in_reply_to\n end",
"def in_reply_to\n headers['In-Reply-To']\n end",
"def set_inquiry_reply\n @inquiry_reply = InquiryReply.find(params[:id])\n end",
"def reply_to\n (@reply_to || self.from)\n end",
"def reply_to\n address_list_fetch('reply-to')\n end",
"def enable_inbox_replies\n client.post('/api/sendreplies', id: read_attribute(:name), state: true)\n end",
"def reply_to( val = nil )\n default :reply_to, val\n end",
"def set_inquiry_reply\n @inquiry_reply = InquiryReply.find(params[:id])\n end",
"def reply_to=(addresses)\n address_list_assign('Reply-To', addresses)\n end",
"def reply_to\n address_impl = @message_impl.getReplyTo\n # only return an address if a reply to was specified\n Qpid::Messaging::Address.new(nil, address_impl) if address_impl\n end",
"def automatic_replies_setting=(value)\n @automatic_replies_setting = value\n end",
"def reply_address\n @reply_address ||= ReplyAddress.decode(recipient_param)\n end",
"def set_replied_to\n if reply?\n parent.replied_at = Time.now\n parent.save!\n end\n end",
"def reply_to\n @reply_to\n end",
"def set_reply\n @reply = Reply.find(params[:id] || params[:reply_id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Replie.find(params[:id])\n end",
"def get_reply_to\n @reply_to\n end",
"def set_recipient\n end",
"def set_reply(reply)\n @reply = reply\n end",
"def reply_chain_message_id=(value)\n @reply_chain_message_id = value\n end",
"def in_reply_to \n self.parent\n end",
"def set_user_reply\n @user_reply = UserReply.find(params[:id])\n end",
"def set_reply_flag\n @reply_flag = ReplyFlag.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def set_reply\n @reply = Reply.find(params[:id])\n end",
"def reply_to\n return @reply_to\n end",
"def set_notice_reply\n @notice_reply = NoticeReply.find(params[:id])\n end",
"def reply_mention(reply_to_user, in_reply_to_id)\n log \"replying to mention\"\n post_reply(reply_to_user, in_reply_to_id, random_mention(reply_to_user.id))\n end",
"def reply_to(m)\n @message_type = METHOD_RETURN\n @reply_serial = m.serial\n @destination = m.sender\n self\n end",
"def set_note_reply\n @note_reply = NoteReply.find(params[:id])\n end",
"def set_post_reply\n @post_reply = PostReply.find(params[:id])\n end",
"def set_reply_answer\n @reply_answer = ReplyAnswer.find(params[:id])\n end",
"def set_reply\n @post = Post.find(params[:post_id])\n @comment = @post.comments.find(params[:comment_id])\n @reply = @comment.replies.find(params[:id])\n end",
"def reply_to(data=nil)\n set_get __method__, data\n end",
"def reply?\n !self.in_reply_to.nil?\n end",
"def reply?\n !self.in_reply_to.nil?\n end",
"def disable_inbox_replies\n client.post('/api/sendreplies', id: read_attribute(:name), state: false)\n end",
"def is_automatic_reply=(value)\n @is_automatic_reply = value\n end",
"def reply_meitan(reply_to_user, in_reply_to_id)\n log \"replying to meitan\"\n post_reply(reply_to_user, in_reply_to_id, random_notmeitan)\n end",
"def recipient=(value)\n @recipient = value\n end",
"def from=(address)\n self.sender = address\n end",
"def inquiry_reply_params\n params.require(:inquiry_reply).permit(:reply, :user_id, :inquiry_detail_id)\n end",
"def set_from_email_address\n self.from_user_email ||= Settings::NotificationsFromEmail || user&.email\n end",
"def pinned_messages=(value)\n @pinned_messages = value\n end",
"def set_rreply\n @rreply = Rreply.find(params[:id])\n end",
"def reply_params\n params.require(:reply).permit(:message, :realtor_id, :inquiry_id)\n end",
"def recipient=(value)\n @recipient = value\n end",
"def set_inbox\n @inbox = current_user.inboxes.find(params[:id])\n end",
"def set_thread_for_replies\n self.thread = self.commentable.thread if self.reply_comment?\n end",
"def reply_to\n sent_to_channel? ? dest : nick\n end",
"def smtp_envelope_from=( val )\n @smtp_envelope_from = val\n end",
"def set_reply_puntuation\n @reply_puntuation = ReplyPuntuation.find(params[:id])\n end",
"def set_admin_reply\n @admin_reply = Admin::Reply.find(params[:id])\n end",
"def reply_params\n params.require(:reply).permit(:body, :repliable_id)\n end",
"def comment_replied_to(options = {})\n send_comment_notification(options, \"reply_notice\")\n end",
"def sent_to_addresses=(value)\n @sent_to_addresses = value\n end",
"def in_reply_to_user\n previous_tweet.try(:author).try(:screen_name) || params[:in_reply_to_user]\n end",
"def reply(opts = {})\n self.dup.reply! opts\n end",
"def publish_opts\n options = {}\n options[:in_reply_to_status_id] = in_reply_to_status_id if reply?\n options\n end",
"def set_replies(prompt)\n if prompt.replies.length == 0\n @reply = prompt.replies.build\n else\n @reply = prompt.replies.last\n end\n end",
"def smtp_envelope_from( val = nil )\n if val\n self.smtp_envelope_from = val\n else\n @smtp_envelope_from || return_path || sender || from_addrs.first\n end\n end",
"def set_inbox\n @inbox = Inbox.find(params[:id])\n end",
"def set_inbox\n @inbox = Inbox.find(params[:id])\n end",
"def forward_as_attachment_to=(value)\n @forward_as_attachment_to = value\n end",
"def reply(message, to = nil)\n if to == :room\n reply_to.reply(message, nil)\n else\n reply_to.reply(message, to || private_sender)\n end\n end",
"def set_inbox_item\n @inbox_item = InboxItem.find(params[:id])\n end",
"def resent_to=( val )\n header[:resent_to] = val\n end",
"def reply_params\n params.require(:reply).permit(:post_id, :body, :repliable_id, :repliable_type, :parent_id)\n end",
"def reply_to_line(reply_text)\n return nil if reply_text.nil?\n\n #Get reply token\n reply_token = params['events'][0]['replyToken'] \n \n #Set reply message\n message = {\n type: 'text',\n text: reply_text\n }\n\n #Send message\n line.reply_message(reply_token, message)\n end",
"def current_user\n @current_user ||= reply_address.user\n end",
"def user_reply_params\n params.require(:user_reply).permit(:user_id, :reply_id)\n end",
"def reply!(opts = {})\n opts = {:remove_children => false}.merge opts\n self.to, self.from = self.from, self.to\n self.children.remove if opts[:remove_children]\n self\n end",
"def forward_to=(value)\n @forward_to = value\n end",
"def casein_config_email_from_address\n\t\t'donotreply@caseincms.com'\n\tend",
"def sent_to_me=(value)\n @sent_to_me = value\n end",
"def reply_to_all\n message = reply\n message.cc(cc)\n message.bcc(bcc)\n message\n end",
"def reply?\n !!in_reply_to_status_id\n end",
"def set_inbox\n @inbox = Inbox.find(params[:id])\n @account_inbox = @inbox.account_inboxes.find_by_account_id(current_account.id)\n end",
"def reply_params\n params.require(:reply).permit(:user_id, :parent_id, :post_id, :title, :text)\n end",
"def show\n @reply = Message.new\n @reply.message_id = @message.id\n @reply.subject = \"Reply to > #{@message.subject}\"\n end",
"def inquiry_params\n _inquiry_params = params.require(:inquiry).permit(:reply_email, :body)\n _inquiry_params.merge!(user_id: current_user.id) if user_signed_in?\n _inquiry_params\n end",
"def set_groupreply\n @groupreply = Groupreply.find(params[:id])\n end"
] | [
"0.8516198",
"0.70250094",
"0.6587329",
"0.6471791",
"0.6452012",
"0.6220507",
"0.6202828",
"0.6160427",
"0.61155653",
"0.60962045",
"0.60667175",
"0.5961754",
"0.5927536",
"0.586672",
"0.58318466",
"0.582511",
"0.5732018",
"0.5682525",
"0.56577617",
"0.56074256",
"0.5570272",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.556731",
"0.55172384",
"0.55160767",
"0.5515229",
"0.55068225",
"0.54608303",
"0.5458158",
"0.5440838",
"0.54081434",
"0.54025",
"0.54025",
"0.5369289",
"0.5362006",
"0.5318182",
"0.5249087",
"0.52441186",
"0.52237165",
"0.51905537",
"0.5160116",
"0.5147791",
"0.51418775",
"0.51418775",
"0.5118412",
"0.50791377",
"0.5062414",
"0.5038224",
"0.5012578",
"0.500671",
"0.49886203",
"0.49870414",
"0.4957213",
"0.4947732",
"0.49428436",
"0.49301955",
"0.49134332",
"0.4909639",
"0.49071854",
"0.4905467",
"0.48978445",
"0.48742253",
"0.48494107",
"0.48258546",
"0.47828022",
"0.4775052",
"0.47743237",
"0.47543532",
"0.47534648",
"0.4739482",
"0.4739482",
"0.47365886",
"0.4732188",
"0.47295973",
"0.47245976",
"0.47160608",
"0.47063687",
"0.4680836",
"0.46796834",
"0.46732998",
"0.466208",
"0.4661531",
"0.46514732",
"0.4642264",
"0.46416062",
"0.46402308",
"0.46402088",
"0.46394515",
"0.46387935",
"0.463562"
] | 0.8018943 | 1 |
Gets the multiValueExtendedProperties property value. The collection of multivalue extended properties defined for the post. Readonly. Nullable. | def multi_value_extended_properties
return @multi_value_extended_properties
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def extension_properties\n return @extension_properties\n end",
"def parse_extended_properties(extended_properties) # :nodoc\n query_parts = []\n ['shared', 'private'].each do |prop_type|\n next unless extended_properties[prop_type]\n query_parts << extended_properties[prop_type].map {|key, value| (prop_type == \"shared\" ? \"sharedExtendedProperty=\" : \"privateExtendedProperty=\") + \"#{key}%3D#{value}\" }.join(\"&\")\n end\n query_parts.join('&')\n end",
"def payload_extended_options\n opts['Payload'] ? opts['Payload']['ExtendedOptions'] : nil\n end",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def field_extras\r\n return @field_extras\r\n end",
"def custom_fields_response\n data[:properties].custom_field_property do\n key :type, \"array\"\n key :xml, wrapped: true\n items do\n key :\"$ref\", \"CustomFieldValueApiResponse\"\n end\n end\n end",
"def get_attr_value_from_ext_attr( m_name, args )\n \n if self.new_record?\n \n value = nil\n \n self.extended_attributes.each do |extended_attr|\n value = ( extended_attr.value.blank? ? nil : extended_attr.value ) if( extended_attr.attr_name == m_name )\n end\n value\n \n else\n extended_attribute = find_in_extended_attr( m_name )\n extended_attribute.value if extended_attribute\n end\n \n end",
"def properties\n return @values['properties'] if @values.key?('properties')\n @values['properties'] = {}\n @values['properties']\n end",
"def multi_valued?\n @multi_valued\n end",
"def get_properties()\n return @properties\n end",
"def additional_properties\n @additional_properties.nil? ? true : @additional_properties\n end",
"def multivalued\n return @multivalued\n end",
"def value\n if allows_nil? && properties.values.all?(&:nil?)\n nil\n else\n Hash[properties.map { |key, value| [key, value.value] }]\n end\n end",
"def get_allowed_property_details\n allowed_property_details = []\n selectable_property_details = PropertyDetail.where(:selectable => true)\n selectable_property_details.each do |pd|\n allowed_property_details.push(pd[:msgid])\n end\n\n return allowed_property_details\n end",
"def payload_extended_options(explicit_target = nil)\n explicit_target ||= target\n\n if explicit_target and explicit_target.payload_extended_options\n explicit_target.payload_extended_options\n else\n payload_info['ExtendedOptions']\n end\n end",
"def modified_properties\n return @modified_properties\n end",
"def multifactor\n attributes['multifactor']\n end",
"def values_for_properties; end",
"def get_multi_post_attributes_2(key, value)\n a = JSON.parse(self.class.post('/postcodes', body:{ \"postcodes\": postcodes_arr}).body)\n a[\"#{key}\"][1][\"#{key}\"][\"#{value}\"]\n end",
"def propvaluearray(name) #:nodoc:\n @properties.select{ |f| f.name? name }.map{ |p| p.value }\n end",
"def extended_folder_permissions\n @attributes[:extended_folder_permissions]\n end",
"def get_multi_post_attributes_1(key, value)\n a = JSON.parse(self.class.post('/postcodes', body:{ \"postcodes\": postcodes_arr}).body)\n a[\"#{key}\"][0][\"#{key}\"][\"#{value}\"]\n end",
"def multi_value?\n self[:value] ||= {}\n is_check_box_type?(/^multi$/)\n end",
"def get_field_deserializers()\n return super.merge({\n \"attachments\" => lambda {|n| @attachments = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Attachment.create_from_discriminator_value(pn) }) },\n \"body\" => lambda {|n| @body = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::ItemBody.create_from_discriminator_value(pn) }) },\n \"conversationId\" => lambda {|n| @conversation_id = n.get_string_value() },\n \"conversationThreadId\" => lambda {|n| @conversation_thread_id = n.get_string_value() },\n \"extensions\" => lambda {|n| @extensions = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Extension.create_from_discriminator_value(pn) }) },\n \"from\" => lambda {|n| @from = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"hasAttachments\" => lambda {|n| @has_attachments = n.get_boolean_value() },\n \"inReplyTo\" => lambda {|n| @in_reply_to = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Post.create_from_discriminator_value(pn) }) },\n \"multiValueExtendedProperties\" => lambda {|n| @multi_value_extended_properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::MultiValueLegacyExtendedProperty.create_from_discriminator_value(pn) }) },\n \"newParticipants\" => lambda {|n| @new_participants = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"receivedDateTime\" => lambda {|n| @received_date_time = n.get_date_time_value() },\n \"sender\" => lambda {|n| @sender = n.get_object_value(lambda {|pn| MicrosoftGraph::Models::Recipient.create_from_discriminator_value(pn) }) },\n \"singleValueExtendedProperties\" => lambda {|n| @single_value_extended_properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::SingleValueLegacyExtendedProperty.create_from_discriminator_value(pn) }) },\n })\n end",
"def list_invalid_properties\n invalid_properties = Array.new\n if @value.nil?\n invalid_properties.push(\"invalid value for 'value', value cannot be nil.\")\n end\n\n return invalid_properties\n end",
"def custom_properties\n custom_attribute_names\n end",
"def extension_attribute7\n return @extension_attribute7\n end",
"def properties\n return @properties\n end",
"def values\n @options[:values].presence\n end",
"def properties\n self.values\n end",
"def customized_properties\n current_resource.info['properties'].select do |_k, v|\n v['is_set'] == true\n end\n end",
"def property_properties\n _property_properties\n end",
"def properties\n { 'object_type' => 'array', 'elements' => @elements.map(&:properties) }\n end",
"def field_value\n if check_version_of(\"mail\", \"> 2.7.0\")\n %w(unparsed_value)\n elsif check_version_of(\"mail\", \"= 2.7.0\")\n %w(instance_variable_get @unparsed_value)\n elsif check_version_of(\"mail\", \"< 2.7.0\")\n %w(instance_variable_get @value)\n end\n end",
"def extension_attribute3\n return @extension_attribute3\n end",
"def extension_attribute14\n return @extension_attribute14\n end",
"def property_options\n return @array.map {|property|\n {\n name: property.print_list,\n value: property.property_id\n }\n }\n end",
"def extension_attribute6\n return @extension_attribute6\n end",
"def extras\n @extras\n end",
"def extras\n @values[@names.length..-1] || []\n end",
"def values\n @options['values']\n end",
"def get_field_deserializers()\n return super.merge({\n \"description\" => lambda {|n| @description = n.get_string_value() },\n \"owner\" => lambda {|n| @owner = n.get_string_value() },\n \"properties\" => lambda {|n| @properties = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::ExtensionSchemaProperty.create_from_discriminator_value(pn) }) },\n \"status\" => lambda {|n| @status = n.get_string_value() },\n \"targetTypes\" => lambda {|n| @target_types = n.get_collection_of_primitive_values(String) },\n })\n end",
"def extension_fields\n self.class.extension_fields\n end",
"def custom_fields_request\n data[:properties].custom_field_property do\n key :type, \"array\"\n items do\n key :\"$ref\", \"CustomFieldValueApiRequest\"\n end\n end\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end"
] | [
"0.7393384",
"0.7393384",
"0.7393384",
"0.7101371",
"0.7101371",
"0.7101371",
"0.63477707",
"0.63477707",
"0.63477707",
"0.541738",
"0.5293812",
"0.50841457",
"0.50493103",
"0.4986824",
"0.49632427",
"0.4952229",
"0.48423252",
"0.4840834",
"0.47651646",
"0.47634137",
"0.4754878",
"0.47454467",
"0.47449282",
"0.46477538",
"0.46471474",
"0.4625119",
"0.461306",
"0.45789278",
"0.45546246",
"0.4551972",
"0.45345786",
"0.45099193",
"0.44988868",
"0.4492795",
"0.44840437",
"0.44640908",
"0.44567317",
"0.4436221",
"0.44234616",
"0.44196922",
"0.4397844",
"0.43893975",
"0.43889233",
"0.43827698",
"0.43777326",
"0.43711382",
"0.43672073",
"0.4365419",
"0.4358311",
"0.43579626",
"0.43488088",
"0.43328434",
"0.43228036",
"0.43207788",
"0.43205392",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096",
"0.43202096"
] | 0.80404097 | 2 |
Sets the multiValueExtendedProperties property value. The collection of multivalue extended properties defined for the post. Readonly. Nullable. | def multi_value_extended_properties=(value)
@multi_value_extended_properties = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def multivalued=(value)\n @multivalued = value\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def extended_ingredients=(extended_ingredients)\n if !extended_ingredients.nil? && extended_ingredients.length < 0\n fail ArgumentError, 'invalid value for \"extended_ingredients\", number of items must be greater than or equal to 0.'\n end\n\n @extended_ingredients = extended_ingredients\n end",
"def value=(value)\n\t\tself.updated_at = Time.now\n\t\tif is_multi_object?\n\t\t\t(@value ||= []) << value\n\t\telse\n\t\t\t@value = value\n\t\tend\n\tend",
"def custom_workflow_extensions=(value)\n @custom_workflow_extensions = value\n end",
"def setExtractMetaTags(value)\n @fields['extract_meta_tags'] = value\n self\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions=(value)\n @extensions = value\n end",
"def extensions=(extensions)\n @extensions = Array extensions\n end",
"def modified_properties=(value)\n @modified_properties = value\n end",
"def set_attr_value( m_name, args )\n \n dup_m_name = m_name.gsub(/=$/, '')\n if( extended_attribute = find_in_extended_attr( dup_m_name ) )\n extended_attribute.update_attributes( :values => args )\n elsif( schema = find_attr_in_schema( dup_m_name ) )\n self.extended_attributes.build( :extended_attributes_schema => schema, :values => args )\n end\n \n end",
"def extensions=(value)\n subtags = Array(value).flatten\n self.extensions_sequence = subtags.empty? ? nil : subtags.join(HYPHEN)\n end",
"def extension_attribute7=(value)\n @extension_attribute7 = value\n end",
"def properties=(value)\n if value == @defaults['properties']\n @values.delete 'properties' if @values.key? 'properties'\n else\n @values['properties'] = value\n end\n end",
"def parse_extended_properties(extended_properties) # :nodoc\n query_parts = []\n ['shared', 'private'].each do |prop_type|\n next unless extended_properties[prop_type]\n query_parts << extended_properties[prop_type].map {|key, value| (prop_type == \"shared\" ? \"sharedExtendedProperty=\" : \"privateExtendedProperty=\") + \"#{key}%3D#{value}\" }.join(\"&\")\n end\n query_parts.join('&')\n end",
"def extension_attribute6=(value)\n @extension_attribute6 = value\n end",
"def extensions=(val)\n set_extensions(val)\n val\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def properties=(value)\n @properties = value\n end",
"def attachments=(value)\n @attachments = Array.new\n unless value.nil? || value.empty?\n value.each do |v1|\n if v1.instance_of? AttachmentJson\n @attachments.push(v1)\n end\n end\n end\n end",
"def values=(collection)\n @values = collection\n end",
"def extension_attribute14=(value)\n @extension_attribute14 = value\n end",
"def extension_attribute5=(value)\n @extension_attribute5 = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def attachments=(value)\n @attachments = value\n end",
"def extension_attribute2=(value)\n @extension_attribute2 = value\n end",
"def custom_extension_callout_instances=(value)\n @custom_extension_callout_instances = value\n end",
"def additional_tags=(value)\n @additional_tags = value\n end",
"def extension_attribute13=(value)\n @extension_attribute13 = value\n end",
"def nested_attributes_list_setter(meta, attributes_list)\n attributes_list = attributes_list.sort_by(&:to_s).map{|k,v| v} if attributes_list.is_a?(Hash)\n if (limit = meta[:limit]) && attributes_list.length > limit\n raise(Error, \"number of nested attributes (#{attributes_list.length}) exceeds the limit (#{limit})\")\n end\n attributes_list.each{|a| nested_attributes_setter(meta, a)}\n end",
"def set_Multiple(value)\n set_input(\"Multiple\", value)\n end",
"def custom_field_values_append=(values)\n send :custom_field_values=, values, '+'\n end",
"def payload_extended_options\n opts['Payload'] ? opts['Payload']['ExtendedOptions'] : nil\n end",
"def extension_attribute9=(value)\n @extension_attribute9 = value\n end",
"def extensions_json=(value)\n @extensions_json = value.to_s\n @extensions = JSON.parse( File.read(@extensions_json) ) # Ruby raises it's own generic I/O read errors & JSON parse errors\n \n @extensions = {} unless @extensions.is_a?(Hash)\n \n @extensions_json\n end",
"def extension_attribute1=(value)\n @extension_attribute1 = value\n end",
"def extension_attribute3=(value)\n @extension_attribute3 = value\n end",
"def set v\n if multiple?\n raise \"wrong number of values (#{v.length} for #{@min_count}..#{@max_count})\" if (v.length < min_count) || (max_count != :unlimited && v.length > max_count)\n @value = v.map {|w| single_value w }\n else\n @value = single_value v\n end\n self\n end",
"def value=(new_value)\n validate(new_value)\n if new_value.nil?\n property_names.each { |name| self[name] = nil }\n else\n property_names.each { |name| self[name] = new_value[name] }\n end\n end",
"def set_cycle_extended_support_fields(cycle)\n explode_date_or_boolean_field(cycle, 'extendedSupport', 'is_extended_support_over', 'extended_support_until', true)\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end"
] | [
"0.69510794",
"0.69510794",
"0.69510794",
"0.5983602",
"0.5983602",
"0.5983602",
"0.5511717",
"0.53901494",
"0.49919668",
"0.49919668",
"0.49919668",
"0.495143",
"0.49347976",
"0.483236",
"0.4754334",
"0.4748352",
"0.4748352",
"0.4748352",
"0.47416958",
"0.47303638",
"0.4647154",
"0.46021107",
"0.4584712",
"0.45438483",
"0.45272118",
"0.45227465",
"0.450106",
"0.44832656",
"0.44832656",
"0.44750944",
"0.4449416",
"0.44433236",
"0.44015545",
"0.43962613",
"0.4385104",
"0.4385104",
"0.4385104",
"0.43411657",
"0.43200788",
"0.43114918",
"0.43100318",
"0.43070585",
"0.4303058",
"0.43025935",
"0.43014237",
"0.4300316",
"0.42956755",
"0.42954692",
"0.428524",
"0.42830545",
"0.42806053",
"0.42782748",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886",
"0.42660886"
] | 0.81445426 | 2 |
Gets the newParticipants property value. Conversation participants that were added to the thread as part of this post. | def new_participants
return @new_participants
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new_participants=(value)\n @new_participants = value\n end",
"def participants_added # :nodoc:\n @properties[ADDED].map { |id| @context.users[id] }\n end",
"def getParticipants\r\n\t\t\t\t\treturn @participants\r\n\t\t\t\tend",
"def participants=(new_participants)\n @participants = new_participants\n self.participant_ids = new_participants.collect(&:id)\n end",
"def participants\n return @participants\n end",
"def participants\n return @participants\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end",
"def participants=(value)\n @participants = value\n end",
"def participants=(value)\n @participants = value\n end",
"def get_conversation_participants(id)\n @client.raw('get', \"/content/conversations/#{id}/participants\")\n end",
"def participants\n attributes['participants'] ||= []\n attributes['participants']\n end",
"def get_conversation_participants(id)\r\n #TODO: Test if this method needs data in options.\r\n @client.raw('get', \"/content/conversations/#{id}/participants\", nil, nil, @contact_v1_url)\r\n end",
"def participants\n @participants ||= ParticipantSet.new\n end",
"def participants_removed # :nodoc:\n @properties[REMOVED].map { |id| @context.users[id] }\n end",
"def participants # :nodoc:\n @participant_ids.map { |p| @context.users[p] }\n end",
"def participants\n return recipients\n end",
"def my_participant_id\n return @my_participant_id\n end",
"def participants\n recipients\n end",
"def participant_ids # :nodoc:\n @participant_ids.map { |id| id.dup }\n end",
"def participant_id\n return @participant_id\n end",
"def participant_id\n return @participant_id\n end",
"def get_challenge_event_participants(opts = {})\n data, _status_code, _headers = get_challenge_event_participants_with_http_info(opts)\n return data\n end",
"def set_participant_ids\n self.participant_ids = []\n self.participant_ids << user_id.to_s\n end",
"def add_participants_to_global_competition\n\t\tusers = User.where({:in_grand_competition=>true})\n\t\t\n\t\tinvitation_count = 0\n\t\tusers.each do |user|\n\t\t\tCompetitionParticipant.add_participant(user.id, self.id)\n\t\t\tinvitation_count += 1\n\t\tend\n\t\tAppMailer.global_race_admin_notify(self.id, users.length, invitation_count).deliver\n\t\t\n\t\treturn \"#{self.name} created. #{invitation_count}/#{users.length} users invited.\"\n\tend",
"def add_participant\n user = self.load_user(params)\n meeting = self.load_meeting(params)\n participant_ids = params[\"participant_ids\"]\n comment = params[\"comment\"].nil? ? \"\" : params[\"comment\"]\n\n if user != nil and meeting != nil and participant_ids.length > 0\n participant_ids.each do |participant_id|\n unless meeting.participants.exists?(participant_id)\n new_participant = User.find(participant_id)\n meeting.participants << new_participant\n # add default vote for the new added participant to each suggestion\n meeting.suggestions.each do |suggestion|\n suggestion.votes << Vote.new(:voter => new_participant, :decision => \"?\")\n end\n\n NotificationService.send_meeting_invitation(user, new_participant, meeting, comment)\n end\n end\n self.send_ok\n else\n self.send_error 401\n end\n end",
"def find_new_conversations\n #get chat rooms users where last_viewed is nil\n user_rooms = ChatRoomsUser.where(\"user_id = ?\", current_user.id)\n new_rooms = Array.new\n user_rooms.each do |room|\n if room.last_viewed.nil?\n new_rooms << ChatRoom.find_by(id: room.chat_room_id)\n end\n end\n new_rooms\n end",
"def nombreParticipants\n return @participants.length\n end",
"def conversation_participants=(conversation_participants)\n conversation_participants.each do |participant, is_sender|\n person_conversations.build(:person_id => participant,\n :is_read => is_sender,\n :last_sent_at => created_at)\n end\n end",
"def get_participants\n user = self.load_user(params)\n meeting = self.load_meeting(params)\n\n if user != nil and meeting != nil\n users = meeting.participants\n send_json(users)\n else\n send_error 401\n end\n end",
"def new_messages_count\n new_messages = self.received_messages.where(:new_message => true)\n if new_messages\n return new_messages.size\n else\n return 0\n end\n end",
"def num_participants\n self.participants.count\n end",
"def num_participants\n self.participants.count\n end",
"def setParticipants(participants)\r\n\t\t\t\t\t@participants = participants\r\n\t\t\t\tend",
"def conversation_params\n params.permit(:participants, participants: [])\n end",
"def get_participants(id, params = {})\n get \"/api/v2/projects/#{id}/participants\", params\n end",
"def new_messages\r\n self.unread_messages\r\n end",
"def new_messages\r\n self.unread_messages\r\n end",
"def participants\n User.find([self.thought.user_id,*thought.comments.map(&:user_id)].compact-[self.user_id])\n end",
"def conversation_member_roles\n return @conversation_member_roles\n end",
"def former_participants( params={} )\n former_participants = get_connections(\"former_participants\", params)\n return map_connections former_participants, :to => Facebook::Graph::Generic\n end",
"def index\n @participants = @current_event.participants rescue nil || []\n end",
"def my_participant_id=(value)\n @my_participant_id = value\n end",
"def participants_size\n participants.size\n end",
"def participants\n users.where(parent_id: parent_id || current_user_id).flat_map(&:participants)\n end",
"def past_meeting_participants(*args)\n options = Zoom::Params.new(Utils.extract_options!(args))\n options.require(%i[meeting_uuid])\n Utils.parse_response self.class.get(\"/past_meetings/#{options[:meeting_uuid]}/participants\", headers: request_headers)\n end",
"def post_conversations_call_participants(conversation_id, body, opts = {})\n data, _status_code, _headers = post_conversations_call_participants_with_http_info(conversation_id, body, opts)\n return data\n end",
"def getAddedPerson\r\n\t\t\t\t\treturn @addedPerson\r\n\t\t\t\tend",
"def participants\n @participants ||= AssignmentParticipant.find(:all, :conditions => ['parent_id = ? and user_id IN (?)', parent_id, users])\n end",
"def participants\n @participants ||= AssignmentParticipant.find(:all, :conditions => ['parent_id = ? and user_id IN (?)', parent_id, users])\n end",
"def participants\n @participants ||= AssignmentParticipant.find(:all, :conditions => ['parent_id = ? and user_id IN (?)', parent_id, users])\n end",
"def get_participants(response_status_number)\n\t\tmeeting_participations.where(response_status: response_status_number)\n\tend",
"def list_participants request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_list_participants_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Dialogflow::V2::ListParticipantsResponse.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def index\n @participants = @event.participants\n end",
"def participant_ids\n participants.pluck(:id)\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def creator_conversation_ids\n self.creator ?\n self.creator.conversation_ids + self.creator.created_conversation_ids : []\n end",
"def takers\n self.questions.first.participants\n end",
"def conversation_member_roles=(value)\n @conversation_member_roles = value\n end",
"def participant\n ScriptoriaCore::Ruote.engine.participant(participant_name)\n end",
"def check_new_user_many_replies\n return unless replying? && @user.posted_too_much_in_topic?(@details[:topic_id])\n\n {\n id: 'too_many_replies',\n templateName: 'education',\n body: PrettyText.cook(I18n.t('education.too_many_replies', newuser_max_replies_per_topic: SiteSetting.newuser_max_replies_per_topic))\n }\n end",
"def to_s\n '#<Twilio.FlexApi.V1.InteractionChannelParticipantList>'\n end",
"def property_conversations\n @property_conversations ||= sender.conversations_about(property)\n end",
"def ajoutParticipants(participant)\n @participants.push(participant)\n end",
"def participants\n expose Challenge.participants(@oauth_token, params[:challenge_id].strip)\n end",
"def after_update_actions\n if is_newbie_changed? && is_newbie == false # registration completed\n if Date.today.to_s == \"2017-03-03\" || Date.today.to_s == \"2017-03-04\"\n conv = Conversation.where(key: 'event_intellect').first\n unless conv.present?\n User.bot.conversations.create!(key: 'event_intellect', group_title: 'Intellect', new_members: [id])\n else\n conv.add_participant(id)\n end\n end\n end\n end",
"def selected_participants #:nodoc:\n return self.bids_dataset.list_subjects if self.implicit_all_participants?\n select_hash = params[:_cb_participants] || {}\n select_hash.keys.select { |sub| select_hash[sub] == '1' }.sort\n end",
"def new_user_and_participant\n @participant = Participant.new\n end",
"def members\n participants\n end",
"def members\n participants\n end",
"def to_s\n '#<Twilio.Api.V2010.ParticipantList>'\n end",
"def other_participants(user)\n all = recipients\n all.delete(user)\n all.delete(nil) # nil will appear when any of the user in the coversation is deleted later.\n all\n end",
"def additional_users_for_new\n []\n end",
"def new\n @participant_old = Participant.find(:first, :conditions => [ \"worker_id = ?\", params[:workerId]])\n \n if @participant_old == nil\n @participant = Participant.new\n @participant.assignment_id = params[:assignmentId]\n @participant.hit_id = params[:hitId]\n @participant.worker_id = params[:workerId]\n @participant.ip_addr = request.remote_ip\n @participant.cond = rand(2)\n else\n @participant = @participant_old.dup\n @participant.assignment_id = params[:assignmentId]\n @participant.hit_id = params[:hitId]\n @participant.ip_addr = request.remote_ip\n end\n\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @participant }\n end\n end",
"def to_s\n '#<Twilio.Api.V2010.ParticipantList>'\n end",
"def get_conversations_chat_participant_wrapup(conversation_id, participant_id, opts = {})\n data, _status_code, _headers = get_conversations_chat_participant_wrapup_with_http_info(conversation_id, participant_id, opts)\n return data\n end",
"def new_puzzle_ids\n\t\t@puzzle_ids - @puzzle_packet_ids\n\tend",
"def get_team_participants(a_team)\n\t\tparticipants_array = []\n\t\tusers.each do |u|\n\t\t\ta_team.users.each do |team_user|\n\t\t\t\tif u==team_user\n\t\t\t\t\tparticipants_array << u\n\t\t\t\t\tbreak\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\t\tparticipants_array\n\tend",
"def get_conversations_message_participant_wrapup(conversation_id, participant_id, opts = {})\n data, _status_code, _headers = get_conversations_message_participant_wrapup_with_http_info(conversation_id, participant_id, opts)\n return data\n end",
"def update_message\n return @num_new_notes != 0 ? \"#{@num_new_notes} new footnotes\" : nil\n end",
"def completed_awaiting_events\n candidate.awaiting_admin_events\n end",
"def chatting_with\n ChatMessage.participant(self).map do |chat|\n if chat.parent.id != self.id\n chat.parent\n else\n Parent.find(chat.recipient_fk)\n end\n end.uniq\n end",
"def new_mail\n poll_response['num_unread'].to_i\n end",
"def per_participant\n\n each_with_object({}) { |wi, h| (h[wi.participant_name] ||= []) << wi }\n end",
"def participants\n participant_ids.blank? ? User.none : User.where(id: participant_ids)\n end",
"def add_participant(_user_ids, _current_user = nil)\n update(new_members: _user_ids.is_a?(Array) ? _user_ids : [_user_ids], updated_by: _current_user)\n end",
"def get_conversation_participant_wrapup(conversation_id, participant_id, opts = {})\n data, _status_code, _headers = get_conversation_participant_wrapup_with_http_info(conversation_id, participant_id, opts)\n return data\n end",
"def contributors_added # :nodoc:\n @properties[ADDED].map { |id| @context.users[id] }\n end",
"def participants\n users = self.users\n participants = []\n users.each do |user|\n participant = AssignmentParticipant.find_by(user_id: user.id, parent_id: parent_id)\n participants << participant unless participant.nil?\n end\n participants\n end",
"def new_values\n @new_values ||= new_resource.value.split(new_resource.delim)\n end",
"def get_conversations_callback_participant_wrapup(conversation_id, participant_id, opts = {})\n data, _status_code, _headers = get_conversations_callback_participant_wrapup_with_http_info(conversation_id, participant_id, opts)\n return data\n end",
"def pending_friends\n self.pending_friends_by_me + self.pending_friends_for_me\n end",
"def get_new_messages\n get_messages_link_and_content\n end",
"def list\n\n get_list['list'].collect { |e| ParticipantEntry.new(e) }\n end",
"def newer\n @newer ||= rogueify(diff.newer).lines.to_a\n end",
"def support_for_old_conversation_params_api(conversation)\n unless params[:participants].nil?\n ids = conversation.conversation_members.pluck(:user_id)\n params[:new_members] = params[:participants] - ids\n params[:del_members] = ids - params[:participants]\n end\n \n unless params[:admin_ids].nil?\n params[:new_admins] = params[:admin_ids]\n params[:del_admins] = conversation.conversation_members.admin.pluck(:user_id) - params[:admin_ids]\n end\n end",
"def get_new_petitions\n @new_petitions = Petition.where(created_at: time_range).order('created_at ASC')\n end",
"def future_participations\n\tfuture_participations = []\n\n\tparticipations.each do |participation|\n\t\tif !participation.micropost.time || (participation.micropost.time && participation.micropost.time.future?)\n\t\t\tfuture_participations << participation.micropost\n\t\tend\n\tend\n\n\treturn future_participations\n end",
"def upcoming_events\n events = []\n if participant?\n participant.upcoming_events.each { |e| events << e }\n else\n events << \"Pregnancy Screener\"\n end\n events\n end",
"def new_promotions\n @new_promotions ||= []\n end"
] | [
"0.7143321",
"0.6168938",
"0.60920346",
"0.6035905",
"0.5810662",
"0.5810662",
"0.5790408",
"0.57590985",
"0.57590985",
"0.5481565",
"0.5471669",
"0.53882396",
"0.5254906",
"0.5252629",
"0.5200686",
"0.5183107",
"0.5143379",
"0.50829077",
"0.5081971",
"0.50524557",
"0.50524557",
"0.49965313",
"0.49219295",
"0.4910054",
"0.48651963",
"0.48614633",
"0.48499215",
"0.4845175",
"0.48418298",
"0.48399815",
"0.48210856",
"0.48210856",
"0.48094195",
"0.47750005",
"0.47701383",
"0.47368625",
"0.47368625",
"0.4733735",
"0.47098774",
"0.4679923",
"0.4679583",
"0.4669017",
"0.4666768",
"0.46618795",
"0.46562874",
"0.46505484",
"0.45959967",
"0.4594302",
"0.4594302",
"0.4594302",
"0.4564466",
"0.45568502",
"0.45521566",
"0.45477796",
"0.4537148",
"0.4537148",
"0.45201346",
"0.4509036",
"0.4501809",
"0.44993603",
"0.44712308",
"0.44568872",
"0.44160852",
"0.4415774",
"0.44107926",
"0.4392186",
"0.4388176",
"0.4381212",
"0.43765926",
"0.43765926",
"0.43641067",
"0.43604895",
"0.43538913",
"0.4349886",
"0.43454438",
"0.43418765",
"0.43365082",
"0.4323214",
"0.4316014",
"0.43155718",
"0.4311026",
"0.4308989",
"0.43016416",
"0.42942888",
"0.42941147",
"0.42912662",
"0.42895827",
"0.42791623",
"0.42777425",
"0.426818",
"0.42600423",
"0.4256163",
"0.42543557",
"0.42523336",
"0.42470133",
"0.4246152",
"0.42387474",
"0.42258397",
"0.42230764",
"0.42196977"
] | 0.7551031 | 0 |
Sets the newParticipants property value. Conversation participants that were added to the thread as part of this post. | def new_participants=(value)
@new_participants = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def participants=(new_participants)\n @participants = new_participants\n self.participant_ids = new_participants.collect(&:id)\n end",
"def setParticipants(participants)\r\n\t\t\t\t\t@participants = participants\r\n\t\t\t\tend",
"def participants=(value)\n @participants = value\n end",
"def participants=(value)\n @participants = value\n end",
"def new_participants\n return @new_participants\n end",
"def set_participant_ids\n self.participant_ids = []\n self.participant_ids << user_id.to_s\n end",
"def conversation_participants=(conversation_participants)\n conversation_participants.each do |participant, is_sender|\n person_conversations.build(:person_id => participant,\n :is_read => is_sender,\n :last_sent_at => created_at)\n end\n end",
"def add_participant\n user = self.load_user(params)\n meeting = self.load_meeting(params)\n participant_ids = params[\"participant_ids\"]\n comment = params[\"comment\"].nil? ? \"\" : params[\"comment\"]\n\n if user != nil and meeting != nil and participant_ids.length > 0\n participant_ids.each do |participant_id|\n unless meeting.participants.exists?(participant_id)\n new_participant = User.find(participant_id)\n meeting.participants << new_participant\n # add default vote for the new added participant to each suggestion\n meeting.suggestions.each do |suggestion|\n suggestion.votes << Vote.new(:voter => new_participant, :decision => \"?\")\n end\n\n NotificationService.send_meeting_invitation(user, new_participant, meeting, comment)\n end\n end\n self.send_ok\n else\n self.send_error 401\n end\n end",
"def add_participants(participants)\n participants.each do |p|\n self.add_default_contact_reason(p)\n end\n end",
"def ajoutParticipants(participant)\n @participants.push(participant)\n end",
"def conversation_params\n params.permit(:participants, participants: [])\n end",
"def set_activity_participant\n @activity_participant = current_user\n end",
"def post_conversations_chat_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversations_chat_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def post_conversations_message_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversations_message_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def add_participant(_user_ids, _current_user = nil)\n update(new_members: _user_ids.is_a?(Array) ? _user_ids : [_user_ids], updated_by: _current_user)\n end",
"def participant=(participant)\n ppl = self_link\n if ppl\n ppl.participant = participant\n else\n participant_person_links.build(:relationship_code => 1, :person => self, :participant => participant, :psu => self.psu)\n end\n end",
"def add_participants_to_global_competition\n\t\tusers = User.where({:in_grand_competition=>true})\n\t\t\n\t\tinvitation_count = 0\n\t\tusers.each do |user|\n\t\t\tCompetitionParticipant.add_participant(user.id, self.id)\n\t\t\tinvitation_count += 1\n\t\tend\n\t\tAppMailer.global_race_admin_notify(self.id, users.length, invitation_count).deliver\n\t\t\n\t\treturn \"#{self.name} created. #{invitation_count}/#{users.length} users invited.\"\n\tend",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def participants\n @participants ||= ParticipantSet.new\n end",
"def set_participant\n @participant = Participant.find_by_id(params[:id])\n end",
"def post_conversations_callback_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversations_callback_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def my_participant_id=(value)\n @my_participant_id = value\n end",
"def set_project_participant\n @project_participant = ProjectParticipant.find(params[:id])\n end",
"def after_update_actions\n if is_newbie_changed? && is_newbie == false # registration completed\n if Date.today.to_s == \"2017-03-03\" || Date.today.to_s == \"2017-03-04\"\n conv = Conversation.where(key: 'event_intellect').first\n unless conv.present?\n User.bot.conversations.create!(key: 'event_intellect', group_title: 'Intellect', new_members: [id])\n else\n conv.add_participant(id)\n end\n end\n end\n end",
"def new_user_and_participant\n @participant = Participant.new\n end",
"def change_preference_list\n\t\tcheck_if_myself\n\t\tuser = current_user\n\t\tnew_preference_list = user_pref_list_params[:preference_list]\n\t\tif new_preference_list != user.preference_list\n\t\t\tuser.update_attributes(user_pref_list_params)\n\t\t\tuser.save\n\t\t\tRecomputeMeetingParticipationsJob.perform_later (0..6).to_a, user\n\t\tend\n\t\tredirect_to settings_page_path\n\tend",
"def participant=(part)\n @participant = part\n end",
"def participants_added # :nodoc:\n @properties[ADDED].map { |id| @context.users[id] }\n end",
"def post_conversation_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversation_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def participant_id=(value)\n @participant_id = value\n end",
"def conversation_participant_ids\n self.conversation ?\n self.conversation.participant_ids + [self.conversation.user_id] : []\n end",
"def post_conversations_call_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversations_call_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n @decision = Decision.find(@participant.decision_id)\n @user = User.find(@participant.user_id)\n @comment = Comment.new\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n @event = @participant.event\n @organization = @participant.organization\n end",
"def post_conversations_email_participant_replace(conversation_id, participant_id, body, opts = {})\n post_conversations_email_participant_replace_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def set_participant\n @participant = Participant.find_by_login(params[:id])\n end",
"def post_conversations_call_participants(conversation_id, body, opts = {})\n data, _status_code, _headers = post_conversations_call_participants_with_http_info(conversation_id, body, opts)\n return data\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def set_participant\n @participant = Participant.find(params[:id])\n end",
"def patch_conversations_chat_participant_attributes(conversation_id, participant_id, body, opts = {})\n patch_conversations_chat_participant_attributes_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def update!(**args)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n @participants = args[:participants] if args.key?(:participants)\n end",
"def send_email_to_participants(request)\n unless RAILS_ENV == \"test\"\n recipients(last_message.sender).each do |recipient|\n if recipient.settings.email_when_new_comment == 1\n UserMailer.deliver_notification_of_new_message(recipient, last_message, request)\n end \n end\n end\n end",
"def update\n params[:interview][:participant_ids].each do |participant_id|\n unless participant_id.empty?\n participant = Participant.find(participant_id)\n @interview.participants << participant\n end\n end\n respond_to do |format|\n\n if @interview.update(interview_params)\n participants = @interview.participants\n emails = []\n participants.each do |p|\n emails += [p.email]\n ReminderMailer.update_email(p.email).deliver_now\n end\n format.html { redirect_to @interview, notice: 'Interview was successfully updated.' }\n format.json { render :show, status: :ok, location: @interview }\n else\n format.html { render :edit }\n format.json { render json: @interview.errors, status: :unprocessable_entity }\n end\n end\n end",
"def reply_to_activity(conversation_id, activity_id, new_activity)\n uri = \"/v3/conversations/#{conversation_id}/activities/#{activity_id}\"\n api_post(uri, new_activity.to_hash)\n end",
"def participant_ids # :nodoc:\n @participant_ids.map { |id| id.dup }\n end",
"def participants(*vals)\n\t\t\t@sprint.participants = Array(vals).map{|v| v.to_sym}\n\t\tend",
"def create\n @participant = Participant.new(participant_params)\n @participant.competences = @competence\n respond_to do |format|\n if @participant.save\n TeamParticipant.create(participant: @participant, team: @team)\n format.html { redirect_to participants_url }\n format.json { render :show, status: :created, location: @participant }\n else\n format.html { render :new }\n format.json { render json: @participant.errors, status: :unprocessable_entity }\n end\n end\n end",
"def set_challenge_participant\n @user = User.find_by(username: params[\"user_id\"])\n @challenge_participant = ChallengeParticipant.find_by(user_id: @user.id, challenge: @challenge.id)\n end",
"def remove_participant\n user = self.load_user(params)\n meeting = self.load_meeting(params)\n participant_ids = params[\"participant_ids\"]\n\n if user != nil and meeting != nil and participant_ids.length > 0\n participant_ids.each do |participant_id|\n if meeting.participants.exists?(participant_id)\n # remove all the participant's votes from each suggestion\n meeting.suggestions.each do |suggestion|\n vote = Vote.where(:voter_id => participant_id, :suggestion_id => suggestion.id)\n if vote != nil\n suggestion.votes.delete(vote)\n end\n end\n meeting.participants.delete(User.find(participant_id))\n end\n end\n self.send_ok\n else\n self.send_error 401\n end\n end",
"def new\n @participant_old = Participant.find(:first, :conditions => [ \"worker_id = ?\", params[:workerId]])\n \n if @participant_old == nil\n @participant = Participant.new\n @participant.assignment_id = params[:assignmentId]\n @participant.hit_id = params[:hitId]\n @participant.worker_id = params[:workerId]\n @participant.ip_addr = request.remote_ip\n @participant.cond = rand(2)\n else\n @participant = @participant_old.dup\n @participant.assignment_id = params[:assignmentId]\n @participant.hit_id = params[:hitId]\n @participant.ip_addr = request.remote_ip\n end\n\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @participant }\n end\n end",
"def past_meeting_participants(*args)\n options = Zoom::Params.new(Utils.extract_options!(args))\n options.require(%i[meeting_uuid])\n Utils.parse_response self.class.get(\"/past_meetings/#{options[:meeting_uuid]}/participants\", headers: request_headers)\n end",
"def create_group\n params[:new_members] = params[:participants]\n params[:new_admins] = params[:admin_ids]\n conversation = current_user.conversations.new(group_params)\n if conversation.save\n render_conversation(conversation)\n else\n render_error_model(conversation)\n end\n end",
"def get_conversation_participants(id)\r\n #TODO: Test if this method needs data in options.\r\n @client.raw('get', \"/content/conversations/#{id}/participants\", nil, nil, @contact_v1_url)\r\n end",
"def set_match_participant\n @match_participant = MatchParticipant.find(params[:id])\n end",
"def add_participant\n user = User.find(params[:user_id])\n\n unless user_is_initiator(current_user, @chat)\n return fail_response(['You are not an author of the conversation'], 403)\n end\n\n if user_related_to_chat(@chat, user)\n return fail_response(['User is already in chat'], 403)\n end\n\n @chat.add_participant(user)\n\n render json: { message: 'success' }, status: :ok\n end",
"def patch_conversations_email_participant_attributes(conversation_id, participant_id, body, opts = {})\n patch_conversations_email_participant_attributes_with_http_info(conversation_id, participant_id, body, opts)\n return nil\n end",
"def getParticipants\r\n\t\t\t\t\treturn @participants\r\n\t\t\t\tend",
"def create\n participants = make_participants\n @existing_chat_thread = current_user.chat_threads.where(\"participants = ?\", participants.to_yaml)\n if @existing_chat_thread.present?\n render json: @existing_chat_thread.first, serializer: Rest::ChatThreadSerializer\n return\n end\n @chat_thread = current_user.chat_threads.build({participants: participants})\n ActiveRecord::Base.transaction do\n @chat_thread.save!\n participants.each do |participant|\n ChatStatus.create(chat_thread_id: @chat_thread.id, user_id: participant)\n end\n end\n render json: @chat_thread, status: :created, serializer: Rest::ChatThreadSerializer\n rescue => e\n render json: @chat_thread.errors.full_messages, status: :unprocessable_entity\n end",
"def notify_participants participants\n participants.each do |participant|\n send_message participant.to_participant_message, participant.phone_number\n puts \"#{participant.name} has received a text message at #{participant.phone_number}\"\n end\n end",
"def create\n initiator = self.load_user(params)\n\n if initiator != nil\n new_meeting = Meeting.create\n new_meeting.initiator_id = initiator.id\n new_meeting.location = params[\"meeting\"][\"location\"]\n new_meeting.title = params[\"meeting\"][\"title\"]\n new_meeting.participants << initiator\n new_meeting.save!\n self.send_json(build_custom_meeting_json(meeting: new_meeting))\n else\n self.send_error 401\n end\n end",
"def new\n @participant = Participant.new\n end",
"def new\n @participant = Participant.new\n end",
"def new\n @participant = Participant.new\n end",
"def increase_new_chat_notifications\n increment!(:new_chat_notification)\n end",
"def set_members\n members_ids = params[:contest][:members].reject(&:empty?)\n @contest.members.destroy_all if params[:action] == \"update\"\n members_ids.each do |members_id|\n member = Member.create(:user_id => members_id.to_i, :invitable => @contest)\n #send notification\n reciver = User.find(members_id)\n notifications = reciver.notifications.unread \n if reciver.notification_setting.try(:new_update)\n Notification.create(recepient_id: members_id, user: current_user, body: \"#{current_user.screen_name } has invited you to join a contest #{@contest.topic} \", notificable: @contest, :accept => false, :is_acceptable=>true)\n PrivatePub.publish_to \"/profiles/new_#{members_id}\", \"jQuery('#all-notifications').html('#{notifications.count}'); jQuery('#all-notifications').addClass('push-notification');\"\n\n end\n end\n end",
"def add_owner_to_participants\n EventParticipant.create(user_id: self.user_id, event_id: self.id)\n end",
"def list_participants request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_list_participants_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Dialogflow::V2::ListParticipantsResponse.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def get_conversation_participants(id)\n @client.raw('get', \"/content/conversations/#{id}/participants\")\n end",
"def set_event\n @event = Event.find(params[:id])\n @participants = @event.players\n end",
"def patch_conversations_message_participant_attributes(conversation_id, participant_id, opts = {})\n patch_conversations_message_participant_attributes_with_http_info(conversation_id, participant_id, opts)\n return nil\n end",
"def former_participants( params={} )\n former_participants = get_connections(\"former_participants\", params)\n return map_connections former_participants, :to => Facebook::Graph::Generic\n end",
"def set_members\n members_ids = params[:message][:recipient_ids].reject(&:empty?)\n members_ids.each do |members_id|\n @message = current_user.messages.create(:conversation_id => members_id , :body => params[:message][:body])\n\n #send notification\n reciver = User.find(members_id)\n if reciver.notification_setting.try(:new_update)\n Notification.create(recepient_id: members_id, user: current_user, body: \"#{current_user.screen_name } has send a message #{@message.topic} \", notificable: @message, :accept => false)\n end\n end\n end",
"def participants_removed # :nodoc:\n @properties[REMOVED].map { |id| @context.users[id] }\n end",
"def post_conversations_cobrowsesession_participant_replace(conversation_id, participant_id, opts = {})\n post_conversations_cobrowsesession_participant_replace_with_http_info(conversation_id, participant_id, opts)\n return nil\n end",
"def participants\n attributes['participants'] ||= []\n attributes['participants']\n end",
"def update_participant_params\n setup_enclosing_resources\n passed_in_available_days = params[:participant][:available_days]\n if passed_in_available_days.blank?\n unavailable_days = (params[:participant][:collection_of_unavailable_days]).split\n else\n available_array = passed_in_available_days.split\n unavailable_days = @meeting.calc_tentative_days - available_array\n end\n params[:participant].delete(:available_days)\n params[:participant][:collection_of_unavailable_days] = unavailable_days.join(' ')\n end",
"def add_participant(participant)\n # Assume we are in a tournament, we'll want to look at the seeds and slot the higher seed as home\n self.logger.info(\"Adding #{participant.name} to match ##{self.id}\")\n self.logger.error(\"Cannot add participants when match has scores already\") and raise if self.home_score > 0 || self.away_score > 0\n self.logger.error(\"Cannot add participants; no free slots in match\") and raise if self.home_participant && self.away_participant\n\n current_participant = self.home_participant || self.away_participant\n current_seed = current_participant ? TeamSeason.where(participant: current_participant, season_id: self.season_id).first.division.to_i : nil\n participant_seed = TeamSeason.where(participant: participant, season_id: self.season_id).first.division.to_i\n if !current_seed || participant_seed < current_seed\n self.home_participant = participant\n self.away_participant = current_participant\n else\n self.home_participant = current_participant\n self.away_participant = participant\n end\n self.save!\n end",
"def create_leader(user)\n participant = Participant.new(school_id: self.id, user_id: user.id)\n participant.role_id = 1\n participant.accepted = 2\n participant.prereq = true\n participant.save\n end",
"def set_participant_type\n @participant_type = ParticipantType.find(params[:id])\n end",
"def update_participant request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_update_participant_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Dialogflow::V2::Participant.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def set_participant_document\n @participant_document = ParticipantDocument.find(params[:id])\n end",
"def change_organizer\n @activity = Activity.find(params[:activity_id])\n @participant = @activity.participants.find(params[:participant_id])\n @participant.is_organizer = params[:new_state]\n @participant.save\n\n message = if params[:new_state] == \"true\"\n I18n.t('activities.organizers.added', name: @participant.person.full_name)\n else\n I18n.t('activities.organizers.removed', name: @participant.person.full_name)\n end\n flash_message(:success, message)\n\n redirect_to edit_group_activity_path(@group, @activity, anchor: 'organizers-add')\n end",
"def new\n @participant = Participant.new \n end",
"def new\n @participant = Participant.new \n end",
"def update\n debugger\n @participants = Participant.find(params[:id])\n\n respond_to do |format|\n if @participants.update_attributes(params[:participant])\n AuditTrail.audit(\"Participant #{@participants.fullname} updated by #{current_user.login}\", edit_participant_url(@participants))\n flash[:notice] = 'Participants was successfully updated.'\n format.html { redirect_to(participants_url) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @participants.errors, :status => :unprocessable_entity }\n end\n end\n end"
] | [
"0.7037466",
"0.6376834",
"0.6308306",
"0.6308306",
"0.6022701",
"0.5907998",
"0.57874614",
"0.55644",
"0.534605",
"0.521189",
"0.5210973",
"0.51620924",
"0.51449126",
"0.50891286",
"0.5083837",
"0.5056697",
"0.5046132",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.5006741",
"0.49831977",
"0.4944799",
"0.4887971",
"0.48829594",
"0.4870105",
"0.48668006",
"0.48648286",
"0.48308766",
"0.4830402",
"0.48261562",
"0.47970593",
"0.47772998",
"0.47772998",
"0.47607937",
"0.47583932",
"0.47230685",
"0.47082677",
"0.4707684",
"0.4676322",
"0.46506208",
"0.46488178",
"0.46488178",
"0.4641333",
"0.46281546",
"0.46247905",
"0.46122542",
"0.4558233",
"0.45409116",
"0.4539681",
"0.45385587",
"0.45253152",
"0.4512483",
"0.45064366",
"0.4497748",
"0.44919685",
"0.4485503",
"0.4482415",
"0.44769496",
"0.44582093",
"0.44560444",
"0.4455216",
"0.4454363",
"0.4452788",
"0.44469428",
"0.44469428",
"0.44469428",
"0.44437075",
"0.4439499",
"0.44354132",
"0.44348928",
"0.44206396",
"0.44183657",
"0.44086134",
"0.4401202",
"0.4388032",
"0.43682978",
"0.43665653",
"0.4366033",
"0.4360152",
"0.43598717",
"0.43595386",
"0.43538076",
"0.43517643",
"0.43473586",
"0.43360272",
"0.43264163",
"0.43264163",
"0.43255365"
] | 0.7493142 | 0 |
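A minimal sketch of the newParticipants accessor pair described in the row above: a plain getter/setter backed by an instance variable, with the getter defaulting to an empty list. The ExamplePost class name and the hash-shaped participant entries are assumptions for illustration only, not part of any particular SDK.

require 'json'

# Hypothetical post object exposing new_participants as a getter/setter pair.
class ExamplePost
  def new_participants
    @new_participants ||= []   # default to an empty participant list
  end

  def new_participants=(value)
    @new_participants = value  # expected: an array of participant hashes/objects
  end
end

post = ExamplePost.new
post.new_participants = [{ "displayName" => "Alex Wilber", "id" => "user-1" }]
post.new_participants.each { |p| puts p["displayName"] }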
Gets the receivedDateTime property value. Specifies when the post was received. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z | def received_date_time
return @received_date_time
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def received_at\n params['_raw_orderCreatedDatetime']\n end",
"def message_received_date_time\n return @message_received_date_time\n end",
"def received_at\n request_datetime\n end",
"def received_at\n Time.parse(params['created']) if params['created']\n end",
"def received_at\n params['TIMESTAMP']\n end",
"def received_date\n @received_at.getlocal('-05:00').strftime('%Y-%m-%d-05:00')\n end",
"def received_at\n DateTime.parse(params['TRANSTIME']) if params['TRANSTIME']\n rescue ArgumentError\n nil\n end",
"def received_at\n params[:json]['message_time']\n end",
"def received_date_time=(value)\n @received_date_time = value\n end",
"def received_date_time=(value)\n @received_date_time = value\n end",
"def received_at\n Time.at(params['transTime'].to_i / 1000).utc\n end",
"def message_received_date_time=(value)\n @message_received_date_time = value\n end",
"def date_time\n @message[:date_time]\n end",
"def received_at\n\tTime.parse params['payment_date']\n end",
"def received_at\n return nil unless (temp_extended_received_at = read_attribute(:received_at))\n temp_received_at1 = encrypt_remove_pre_and_postfix(temp_extended_received_at, 'received_at', 5)\n temp_received_at2 = YAML::load(temp_received_at1)\n temp_received_at2 = temp_received_at2.to_time if temp_received_at2.class.name == 'Date'\n temp_received_at2\n end",
"def received_at\r\n params['TxTime']\r\n end",
"def received_at\n updated_at_for_status \"received\"\n end",
"def result\n DateTime.parse(value.sub(/^datetime-/, '')).utc\n end",
"def received_at\n params['date'] + params['time']\n end",
"def timestamp\n Time.parse(message.date.to_s).utc\n end",
"def received_at\n nil\n end",
"def received_at\n params['Process_date'] + params['Process_time']\n end",
"def posted\n # Will interpret the stored date as being in the timezone set in Time.zone\n Time.zone.parse(posted_before_type_cast)\n end",
"def observed_utc_offset\n @offset.observed_utc_offset\n end",
"def created_date_time_utc\n return @created_date_time_utc\n end",
"def received_at\n nil\n end",
"def received_at\n nil\n end",
"def received_at\n params['']\n end",
"def datetime_stamp\n Time.now.utc.iso8601\n end",
"def received_at\n params['']\n end",
"def returned_date_time\n return @returned_date_time\n end",
"def sender_date_time\n return @sender_date_time\n end",
"def sent_date_time\n return @sent_date_time\n end",
"def value\n if @date_time_value\n @date_time_value.strftime(\"%Y%m%dT%H%M%S#{tzid == \"UTC\" ? \"Z\" : \"\"}\")\n else\n nil\n end\n end",
"def datetime\n @datetime ||= if observed_on && errors[:observed_on].blank?\n time_observed_at_in_zone ||\n Time.new(observed_on.year,\n observed_on.month,\n observed_on.day, 0, 0, 0,\n timezone_offset)\n end\n end",
"def arrival_time\n DateTime.parse(@raw_data[:ArrivalDateTime])\n end",
"def getDateTime()\n return self.getLogDate.to_DateTime()\n end",
"def result\n DateTime.iso8601(value).utc\n end",
"def build_time\n return nil if received_at.blank?\n created_at - received_at\n end",
"def build_time\n return nil if self.received_at.blank?\n self.created_at - self.received_at\n end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def offset\n if @time\n @time.utc_offset\n elsif @datetime\n (3600*24*@datetime.offset).to_i\n else\n 0\n end\n end",
"def last_delivered_date_time\n return @last_delivered_date_time\n end",
"def created_at\n return DateTime.parse(@created_at) if @created_at\n\n @created_at\n end",
"def notification_daily_send_time\n @attributes[:notification_daily_send_time]\n end",
"def notification_daily_send_time\n @attributes[:notification_daily_send_time]\n end",
"def send_at\n raise InvalidSchema unless valid?\n\n @send_at ||= Time.zone.parse(raw_data[:send_at])\n end",
"def get_request_timestamp\n\t\treturn @transport.get_path(\"meta\",\"datetime\")\n\tend",
"def dispatched_at\n\n fields['dispatched_at']\n end",
"def created_at\n Time.parse(@created_at).utc unless @created_at.nil?\n end",
"def datetime_timestamp\n return Date.parse(self.date).to_time\n end",
"def time\n @time ||= incorporate_utc_offset(@utc, utc_offset)\n end",
"def created_date_time_utc=(value)\n @created_date_time_utc = value\n end",
"def utc_now()\n tz = TZInfo::Timezone.get('Etc/UTC')\n tz.now.to_datetime\n end",
"def to_datetime\n @to_datetime ||= utc.to_datetime.new_offset(Rational(utc_offset, 86_400))\n end",
"def getutc() end",
"def to_soap_value\n return to_s unless respond_to? :to_datetime\n to_datetime.to_soap_value\n end",
"def ruby_value\n to_datetime\n end",
"def created_time\n Time.parse(object[\"created_time\"]) if object[\"created_time\"]\n end",
"def utc\n @utc ||= incorporate_utc_offset(@time, -utc_offset)\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def created_date_time\n return @created_date_time\n end",
"def real_created_at\n self[:remote_created_at] || self[:created_at]\n end",
"def offset\n if @time\n @time.utc_offset\n elsif @datetime\n (3600*24*@datetime.offset).to_i\n elsif @timestamp_with_offset\n @timestamp_with_offset.utc_offset\n else\n 0\n end\n end",
"def utc\n @attributes.fetch('UTC', nil)\n end",
"def timestamp\n @data['when'].to_time\n end",
"def event_date_time\n return @event_date_time\n end",
"def httpdate\n utc.httpdate\n end",
"def processed_at\n @data.has_key?('processed_at') ? Time.parse(data['processed_at']) : nil\n end",
"def system_datetime_millis\r\n response = self.system_datetime()\r\n if response.error?\r\n raise IOError, \"Time lookup failed: #{response.status_code} - #{response.status_message}.\"\r\n end\r\n content = response.content\r\n time_value = nil\r\n if @data_format == DataFormat::XML\r\n time_value = parse_xml_tag(content, ['systemDateTime', 'value'])\r\n else\r\n json_data = JSON.parse(content)\r\n time_value = json_data['systemDateTime']['value']\r\n end\r\n\r\n datetime_to_millis(time_value)\r\n end",
"def created_at_datetime\n @created_at_datetime ||= DateTime.parse(@created_at)\n end"
] | [
"0.7229156",
"0.69821215",
"0.6949472",
"0.68040156",
"0.6638946",
"0.6619124",
"0.651624",
"0.646386",
"0.6388152",
"0.6388152",
"0.6385655",
"0.6303818",
"0.6279794",
"0.6250664",
"0.62407196",
"0.6234332",
"0.620122",
"0.6158274",
"0.61064506",
"0.607306",
"0.60000485",
"0.5980977",
"0.59747654",
"0.59713835",
"0.59476644",
"0.5930176",
"0.5930176",
"0.5925337",
"0.59073776",
"0.58850926",
"0.58640933",
"0.5842859",
"0.583683",
"0.5822571",
"0.58113074",
"0.57975286",
"0.5776627",
"0.57543385",
"0.56897146",
"0.56820357",
"0.56326294",
"0.56326294",
"0.56326294",
"0.56326294",
"0.55818784",
"0.5560158",
"0.55397177",
"0.55383193",
"0.55383193",
"0.55129254",
"0.546071",
"0.5450787",
"0.54119575",
"0.54056424",
"0.5399976",
"0.53987163",
"0.53972465",
"0.5389492",
"0.537154",
"0.53620017",
"0.5355479",
"0.53411376",
"0.5336025",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5333763",
"0.5332945",
"0.5325655",
"0.5324579",
"0.53229314",
"0.53215003",
"0.5314726",
"0.5301645",
"0.5294947",
"0.5290753",
"0.52895975"
] | 0.74083483 | 1 |
Sets the receivedDateTime property value. Specifies when the post was received. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z | def received_date_time=(value)
@received_date_time = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def message_received_date_time=(value)\n @message_received_date_time = value\n end",
"def received_at\n params['_raw_orderCreatedDatetime']\n end",
"def received_at\n Time.parse(params['created']) if params['created']\n end",
"def received_date_time\n return @received_date_time\n end",
"def received_date_time\n return @received_date_time\n end",
"def created_date_time_utc=(value)\n @created_date_time_utc = value\n end",
"def sent_date_time=(value)\n @sent_date_time = value\n end",
"def received_at\n request_datetime\n end",
"def received_at\n DateTime.parse(params['TRANSTIME']) if params['TRANSTIME']\n rescue ArgumentError\n nil\n end",
"def received_date\n @received_at.getlocal('-05:00').strftime('%Y-%m-%d-05:00')\n end",
"def update_received_modified\n self.received_at = Time.now if received_changed?\n end",
"def stamp\n @options[:last_received_time] = Time.now\n self\n end",
"def sender_date_time=(value)\n @sender_date_time = value\n end",
"def created_date_time=(createdDateTime)\n @created_date_time = createdDateTime\n end",
"def received_at\n\tTime.parse params['payment_date']\n end",
"def received_at\n Time.at(params['transTime'].to_i / 1000).utc\n end",
"def received_at\n params['date'] + params['time']\n end",
"def received_at\n params['TIMESTAMP']\n end",
"def send_at\n raise InvalidSchema unless valid?\n\n @send_at ||= Time.zone.parse(raw_data[:send_at])\n end",
"def received_at\n nil\n end",
"def message_received_date_time\n return @message_received_date_time\n end",
"def updated_at=(value)\n @updated_at = DateTime.parse(value)\n end",
"def received_at\n nil\n end",
"def received_at\n nil\n end",
"def created_at=(value)\n @created_at = DateTime.parse(value)\n end",
"def last_delivered_date_time=(value)\n @last_delivered_date_time = value\n end",
"def received_at\n params[:json]['message_time']\n end",
"def received_at\n params['Process_date'] + params['Process_time']\n end",
"def posted\n # Will interpret the stored date as being in the timezone set in Time.zone\n Time.zone.parse(posted_before_type_cast)\n end",
"def received_at\n return nil unless (temp_extended_received_at = read_attribute(:received_at))\n temp_received_at1 = encrypt_remove_pre_and_postfix(temp_extended_received_at, 'received_at', 5)\n temp_received_at2 = YAML::load(temp_received_at1)\n temp_received_at2 = temp_received_at2.to_time if temp_received_at2.class.name == 'Date'\n temp_received_at2\n end",
"def receive_object(metadata, payload)\n @last_received = Time.now\n end",
"def date_time=(date_time)\n @date_time = DateTime.parse(date_time)\n end",
"def received_at\r\n params['TxTime']\r\n end",
"def set_delivered_at\n self.delivered_at = Date.today\n end",
"def pay_date= datetime\n if datetime\n datetime = User.current.system_time(datetime)\n end\n write_attribute(:pay_date, datetime)\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def created_date_time=(value)\n @created_date_time = value\n end",
"def received_at\n updated_at_for_status \"received\"\n end",
"def created_at=(val)\n if val.is_a?(String)\n @created_at = Time.parse(val)\n else\n @created_at = val\n end\n end",
"def returned_date_time=(value)\n @returned_date_time = value\n end",
"def processed_date_time=(value)\n @processed_date_time = value\n end",
"def processed_date_time=(value)\n @processed_date_time = value\n end",
"def requested_at=(val)\n @requested_at = TicketSharing::Time.new(val)\n end",
"def datetime\n @datetime ||= if observed_on && errors[:observed_on].blank?\n time_observed_at_in_zone ||\n Time.new(observed_on.year,\n observed_on.month,\n observed_on.day, 0, 0, 0,\n timezone_offset)\n end\n end",
"def reminder_date_time=(value)\n @reminder_date_time = value\n end",
"def mark_as_received(date = Date.today)\n self.quantity_received = self.quantity_ordered\n self.date_received = date\n end",
"def upload_date_time=(value)\n @upload_date_time = value\n end",
"def to_datetime\n @to_datetime ||= utc.to_datetime.new_offset(Rational(utc_offset, 86_400))\n end",
"def last_seen_date_time=(value)\n @last_seen_date_time = value\n end",
"def last_seen_date_time=(value)\n @last_seen_date_time = value\n end",
"def last_seen_date_time=(value)\n @last_seen_date_time = value\n end",
"def set_time_in_time_zone\n return true if time_observed_at.blank? || time_zone.blank?\n return true unless time_observed_at_changed? || time_zone_changed?\n \n # Render the time as a string\n time_s = time_observed_at_before_type_cast\n unless time_s.is_a? String\n time_s = time_observed_at_before_type_cast.strftime(\"%Y-%m-%d %H:%M:%S\")\n end\n \n # Get the time zone offset as a string and append it\n offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false)\n time_s += \" #{offset_s}\"\n \n self.time_observed_at = Time.parse(time_s)\n true\n end",
"def unsubmitted_date_time=(value)\n @unsubmitted_date_time = value\n end",
"def value=(val) # :nodoc:\n case val\n when nil\n @date_time_value = nil\n when String\n self.tzid = 'UTC' if val =~/Z/\n @date_time_value = ::DateTime.parse(val)\n when ::DateTime\n @date_time_value = val\n when ::Date, ::Time\n @date_time_value = ::DateTime.parse(val.to_s)\n end\n end",
"def set_record_created_at\n self.record_created_at = Time.current.utc.iso8601(3)\n end",
"def set_ToDateTime(value)\n set_input(\"ToDateTime\", value)\n end",
"def set_ToDateTime(value)\n set_input(\"ToDateTime\", value)\n end",
"def set_ToDateTime(value)\n set_input(\"ToDateTime\", value)\n end",
"def set_ToDateTime(value)\n set_input(\"ToDateTime\", value)\n end",
"def set_ToDateTime(value)\n set_input(\"ToDateTime\", value)\n end",
"def last_heartbeat_date_time=(value)\n @last_heartbeat_date_time = value\n end",
"def last_heartbeat_date_time=(value)\n @last_heartbeat_date_time = value\n end",
"def reviewed_date_time=(value)\n @reviewed_date_time = value\n end",
"def published_date_time=(value)\n @published_date_time = value\n end",
"def published_date_time=(value)\n @published_date_time = value\n end",
"def updated_at_datetime\n @updated_at_datetime ||= DateTime.parse(@updated_at)\n end",
"def timestamp\n Time.parse(message.date.to_s).utc\n end",
"def utc\n @utc ||= incorporate_utc_offset(@time, -utc_offset)\n end",
"def set_created_at\n if !timeless? && !created_at\n time = Time.configured.now\n self.updated_at = time if is_a?(Updated) && !updated_at_changed?\n self.created_at = time\n end\n clear_timeless_option\n end",
"def set_date_time=(date_time)\n\t\t@date_time=DateTime.now\n\tend",
"def set_posted_at\n if self.is_public? && self.is_public_changed?\n # becoming public\n self.posted_at = Time.now\n\n elsif !self.is_public? && self.is_public_changed?\n # loosing public\n self.posted_at = nil\n\n end\n\n return true\n end",
"def received_at\n params['']\n end",
"def datetime_stamp\n Time.now.utc.iso8601\n end",
"def event_date_time=(value)\n @event_date_time = value\n end"
] | [
"0.6847314",
"0.6300801",
"0.6121688",
"0.6041164",
"0.6041164",
"0.6032692",
"0.5979375",
"0.59409666",
"0.5921429",
"0.5868537",
"0.58281744",
"0.5805354",
"0.5766811",
"0.5714113",
"0.5668339",
"0.5626505",
"0.559062",
"0.5590175",
"0.55610156",
"0.5471812",
"0.5467572",
"0.54628634",
"0.5451115",
"0.5451115",
"0.53757143",
"0.5372808",
"0.5369185",
"0.5306021",
"0.5305753",
"0.52749616",
"0.52709347",
"0.5237669",
"0.5229131",
"0.5227317",
"0.5226306",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5222758",
"0.5200174",
"0.5183804",
"0.5176469",
"0.5124363",
"0.5124363",
"0.50992554",
"0.5098589",
"0.50412387",
"0.5020511",
"0.49974087",
"0.49682403",
"0.4963665",
"0.4963665",
"0.4963665",
"0.49491942",
"0.49454117",
"0.49449372",
"0.4933486",
"0.49101183",
"0.49101183",
"0.49101183",
"0.49101183",
"0.49101183",
"0.4908154",
"0.4908154",
"0.49017462",
"0.48829567",
"0.48829567",
"0.4881938",
"0.4881722",
"0.48609787",
"0.48609564",
"0.48600933",
"0.48554805",
"0.48554757",
"0.48267493",
"0.48220432"
] | 0.71871555 | 1 |
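As a rough illustration of the receivedDateTime convention in the two rows above (ISO 8601, always UTC), the sketch below normalises a string such as "2014-01-01T00:00:00Z" to a UTC Time on assignment. The ExampleMessage class and the string-or-Time handling are assumptions for illustration, not the behaviour of the generated SDK accessors, which simply store the value.

require 'time'

# Hypothetical receivedDateTime-style accessor that normalises ISO 8601 input to UTC.
class ExampleMessage
  def received_date_time
    @received_date_time
  end

  def received_date_time=(value)
    @received_date_time = value.is_a?(String) ? Time.iso8601(value).utc : value.utc
  end
end

msg = ExampleMessage.new
msg.received_date_time = "2014-01-01T00:00:00Z"
puts msg.received_date_time.iso8601   # => 2014-01-01T00:00:00Z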
Gets the sender property value. Contains the address of the sender. The value of Sender is assumed to be the address of the authenticated user in the case when Sender is not specified. This is a default property. | def sender
return @sender
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sender_address\n @sender_address || ''\n end",
"def sender(sender_id)\n if sender = User.find(sender_id)\n address = default_sender_address\n address.display_name = sender.name\n address.format\n end\n end",
"def sender\n begin\n User.cached_find(self.sender_id)\n rescue\n nil\n end\n end",
"def sender\n @payload['sender']\n end",
"def sender_message\n return @sender_message\n end",
"def sender\n @sender ||= User.find( @sender_id )\n end",
"def sender_email\n msg['from_email'] || msg['sender'] || entry['sender'] || reject['sender']\n end",
"def sender\n @payload[:sender]\n end",
"def sender_number\n if self.respond_to? :carrier\n return self[:number]\n else \n return self[:from]\n end\n end",
"def sender_user_id\n return @sender_user_id\n end",
"def sender\n request.from.first.to_s\n end",
"def default_sender\n DEFAULT_SENDER\n end",
"def sender_name\n @stanza.from.resource\n end",
"def sender_address\n case from\n when Hash; Hash[*from.first] if from.size > 1\n when Array; from.first if from.size > 1\n end\n end",
"def sender=(value)\n @sender = value\n end",
"def sender=(value)\n @sender = value\n end",
"def text_message_default_sender\n current_params.fetch(:from)\n end",
"def originator\n @originator = self.original_message.sender if @originator.nil?\n return @originator\n end",
"def sender=(hostname)\n @message[:sender] = hostname\n end",
"def mail_from_project_sender\n sender = if @issue\n p = @issue.project\n s = CustomField.find_by_name('project-sender-email')\n p.custom_value_for(s).try(:value) if p.present? && s.present?\n end\n (sender.present? && sender) || Setting.mail_from\n end",
"def sender_message=(value)\n @sender_message = value\n end",
"def recipient(current_user)\n \tself.sender_id == current_user.id ? self.receiver : self.sender\n \tend",
"def sender\n user = User.find_by_id(sent_by)\n \"#{user.first_name} #{user.last_name}\"\n end",
"def sender=(sender)\n @sender = ensure_type(Sender, sender)\n end",
"def sender=(sender)\n @sender = ensure_type(Sender, sender)\n end",
"def from=(address)\n self.sender = address\n end",
"def user\n if scope.id == object.sender_user_id\n object.recipient_user\n else\n object.sender_user\n end\n end",
"def sender=( val )\n header[:sender] = val\n end",
"def originator\n @originator ||= original_message.sender\n end",
"def the_sender_name\n sender.full_name(false, created_at)\n end",
"def sender_user_id=(value)\n @sender_user_id = value\n end",
"def update_sender\n if !@from_name.blank? and from_email_address and !@from_email_address.empty? and !(@from_name.to_s == @from_email_address.to_s )\n self.sender = \"#{@from_name} <#{@from_email_address}>\"\n else\n self.sender = @from_email_address.to_s\n end\n end",
"def mail_from\n if @yaml[\"mail\"][\"from\"] != \"example@example.com\"\n return @yaml[\"mail\"][\"from\"]\n end\n end",
"def src_ip\n self[:sender_ip]\n end",
"def ezm_sender_or_receiver(message)\r\n if session[:mail_box] == \"outbox\"\r\n message.receiver_name\r\n # Used for both inbox and trashbin\r\n else\r\n message.sender_name\r\n end\r\n end",
"def ezm_sender_or_receiver_label\r\n if session[:mail_box] == \"outbox\"\r\n \"Recipient\"\r\n # Used for both inbox and trashbin\r\n else\r\n \"Sender\"\r\n end\r\n end",
"def type\n \"Sender\"\n end",
"def to_sender\n if self.sender\n self.sender\n else\n Person.new({\n :gender => self.sender_gender,\n :academic_title_id => self.sender_academic_title_id,\n :first_name => self.sender_first_name,\n :last_name => self.sender_last_name,\n :email => self.sender_email\n })\n end\n end",
"def sender\n ENV['NOTIFICATION_FROM_EMAIL'] || 'noreply@some.gov'\n end",
"def last_sender\n @last_sender = self.last_message.sender if @last_sender.nil?\n return @last_sender\n end",
"def recipient\n return @recipient\n end",
"def sender\n sender = User.where(:uid => sender_uid)\n sender.first\n end",
"def recipient\n return @recipient\n end",
"def sender\n self.user_type.constantize.find(self.user_id)\n end",
"def get_email_recipient\n user == chat.sender ? chat.recipient : chat.sender\n end",
"def sender\r\n SenderController.instance\r\n end",
"def serialized_sender\n user = User.current_user || donor\n Api::V1::UserSerializer.new(user)\n end",
"def smtp_envelope_from( val = nil )\n if val\n self.smtp_envelope_from = val\n else\n @smtp_envelope_from || return_path || sender || from_addrs.first\n end\n end",
"def from\n @_from || payload && payload['from']\n end",
"def src_mac\n self[:sender_mac]\n end",
"def recipient_email\n return @recipient_email\n end",
"def recipient_email\n return @recipient_email\n end",
"def set_from_email_address\n self.from_user_email ||= Settings::NotificationsFromEmail || user&.email\n end",
"def last_sender\n @last_sender ||= last_message.sender\n end",
"def default_sender_address\n address = Mail::Address.new(Gitlab.config.gitlab.email_from)\n address.display_name = \"GitLab\"\n address\n end",
"def from\n @property[:from]\n end",
"def with(current_user)\n \tsender == current_user ? recipient : sender\n\tend",
"def recipient_name\n recipient.name if recipient\n end",
"def from_name\n site.email_from_name || site.title\n end",
"def email\n\t\tif self.email_address\n\t\t\tself.email_address.email\n\t\telse\n\t\t\tself.user.email\n\t\tend\n\tend",
"def sender( val = nil )\n default :sender, val\n end",
"def from\n address = @mail[:from].formatted\n Mail::Address.new(address.first)\n end",
"def smtp_envelope_from=( val )\n @smtp_envelope_from = val\n end",
"def sms_email_address\n carrier.sms_email_address(self) if carrier\n end",
"def other_user(user)\n user == sender ? recipient : sender\n end",
"def from_address\n site.email_from_address || site.support_email\n end",
"def safe_mail_from\n name = self.mail.from_name_if_present\n if name.nil?\n return nil\n end\n name = name.dup\n self.info_request.apply_censor_rules_to_text!(name)\n return name\n end",
"def mailboxer_email(object)\n self.email\n end",
"def reply_to\n (@reply_to || self.from)\n end",
"def sent_to_me\n return @sent_to_me\n end",
"def mailboxer_email(object)\n return email\n end",
"def receiver_email\n nil\n end",
"def not_current_user(message)\n\t\tmessage.sender == current_user ? message.recipient : message.sender\n\tend",
"def sender? usr\n usr.id == user_id\n end",
"def me\n MailboxUser.find_user((@@user.include?('@') ? @@user : \"#{@@user}@\"))\n end",
"def from\n @_from ||= payload.is_a?(Hash) ? payload['from'] : payload.try(:from)\n end",
"def get_sender(sender_id, opts = {})\n data, _status_code, _headers = get_sender_with_http_info(sender_id, opts)\n data\n end",
"def originator\n self.user ? self.user.name : NOT_SET\n end",
"def primary_smtp_address\n return @primary_smtp_address\n end",
"def hubssolib_get_user_address\n user = self.hubssolib_current_user\n user ? user.user_email : nil\n end",
"def receiver_name\n if self.has_registered_receiver?\n \"#{self.receiver.name}\"\n else\n self.to_receiver.name_and_email\n end\n end",
"def mail_nickname\n return @mail_nickname\n end",
"def recipient=(value)\n @recipient = value\n end",
"def email\n self[:emailAddress]\n end",
"def configured_sender( sender_type )\n\t\treturn Observability::Sender.create( sender_type ) if sender_type\n\t\treturn Observability::Sender.configured_type\n\tend",
"def person_for sender\n room.people[sender.stripped.to_s] || Person.new(identity: sender)\n end",
"def real_sender()\n\t\tself.received.last\n\tend",
"def sender_obscured\n if sender.blank? or !topica_message_id.blank?\n return sender\n end\n \n sender_parts = sender.split(\"@\")\n if sender_parts.size > 1\n user_name = sender_parts.first\n if user_name.length > 2\n return user_name[0..(user_name.length - 3)] + \"..@\" + sender_parts.last\n else\n return \"..@\" + sender_parts.last\n end\n end\n \n return sender\n end",
"def sender_contains\n return @sender_contains\n end",
"def message_user\n return self.username\n end",
"def email\n (@session[:current_user_details] || {})[:email]\n end",
"def receiver_email\n nil\n end",
"def sender?\n !!self.sender\n end",
"def contact_email\n\t\treturn get_configuration['sa_contact_email']\n\tend",
"def state_author\n if self.originator\n return User.find_by_id(self.originator).email || 'Unknown'\n end\n return 'None'\n end",
"def rezm_sender_or_receiver_label\n if params[:action] == \"outbox\"\n \"Recipient\"\n # Used for both inbox and trashbin\n else\n \"Sender\"\n end\n end",
"def mailboxer_email(object)\n email\n end",
"def mailboxer_email(object)\n email\n end",
"def mailboxer_name\n self.name\n end"
] | [
"0.7677934",
"0.764343",
"0.75346863",
"0.74634045",
"0.7427191",
"0.73489726",
"0.7190978",
"0.7184124",
"0.71835005",
"0.7183047",
"0.7051706",
"0.68450606",
"0.6840625",
"0.6823984",
"0.6823034",
"0.6823034",
"0.6792492",
"0.6655992",
"0.65891725",
"0.65613836",
"0.65260464",
"0.6518768",
"0.6493279",
"0.6449567",
"0.6449567",
"0.64467496",
"0.6446645",
"0.64246637",
"0.64022315",
"0.6347211",
"0.63248014",
"0.63241583",
"0.6314766",
"0.62412643",
"0.6235685",
"0.6205639",
"0.61742246",
"0.6172247",
"0.61674863",
"0.6149906",
"0.6144887",
"0.6127045",
"0.6124986",
"0.6118234",
"0.6102199",
"0.60834485",
"0.607165",
"0.60574985",
"0.60528237",
"0.60506004",
"0.60505587",
"0.60505587",
"0.6045582",
"0.60324794",
"0.60225785",
"0.59979844",
"0.58801496",
"0.5877574",
"0.58402777",
"0.5825612",
"0.58249843",
"0.5820787",
"0.58008033",
"0.5798897",
"0.5778336",
"0.57488394",
"0.5734249",
"0.57304674",
"0.57291347",
"0.5722061",
"0.5718144",
"0.5716137",
"0.5664897",
"0.56574327",
"0.565487",
"0.56480455",
"0.5633487",
"0.5631555",
"0.5630318",
"0.56296265",
"0.5618086",
"0.56143427",
"0.5605967",
"0.56059104",
"0.5605446",
"0.5603644",
"0.5596851",
"0.55817026",
"0.5580484",
"0.55792063",
"0.5574109",
"0.55722964",
"0.5570931",
"0.5546448",
"0.5540384",
"0.5532031",
"0.5522548",
"0.5522548",
"0.5520507"
] | 0.70061344 | 12 |
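A small sketch of the sender convention described in the row above, where the sender falls back to the authenticated user's address when none is set explicitly. The ExampleOutgoingPost class and the constructor-supplied authenticated_user_address are hypothetical stand-ins; the real property is a plain stored value whose default is applied service-side.

# Hypothetical illustration: sender defaults to the authenticated user's address.
class ExampleOutgoingPost
  def initialize(authenticated_user_address)
    @authenticated_user_address = authenticated_user_address
  end

  def sender
    @sender || @authenticated_user_address
  end

  def sender=(value)
    @sender = value
  end
end

post = ExampleOutgoingPost.new("alex@example.com")
puts post.sender               # defaults to the authenticated user's address
post.sender = "group@example.com"
puts post.sender               # an explicit sender overrides the default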
Sets the sender property value. Contains the address of the sender. The value of Sender is assumed to be the address of the authenticated user in the case when Sender is not specified. This is a default property. | def sender=(value)
@sender = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sender=(sender)\n @sender = ensure_type(Sender, sender)\n end",
"def sender=(sender)\n @sender = ensure_type(Sender, sender)\n end",
"def from=(address)\n self.sender = address\n end",
"def sender=(hostname)\n @message[:sender] = hostname\n end",
"def sender(sender_id)\n if sender = User.find(sender_id)\n address = default_sender_address\n address.display_name = sender.name\n address.format\n end\n end",
"def sender=( val )\n header[:sender] = val\n end",
"def update_sender\n if !@from_name.blank? and from_email_address and !@from_email_address.empty? and !(@from_name.to_s == @from_email_address.to_s )\n self.sender = \"#{@from_name} <#{@from_email_address}>\"\n else\n self.sender = @from_email_address.to_s\n end\n end",
"def sender_message=(value)\n @sender_message = value\n end",
"def set_from_email_address\n self.from_user_email ||= Settings::NotificationsFromEmail || user&.email\n end",
"def sender_user_id=(value)\n @sender_user_id = value\n end",
"def sender\n @sender ||= User.find( @sender_id )\n end",
"def sender_address\n @sender_address || ''\n end",
"def smtp_envelope_from=( val )\n @smtp_envelope_from = val\n end",
"def default_sender\n DEFAULT_SENDER\n end",
"def set_sender(sender_id)\n fb_user = Messenger::Client.get_user_profile(sender_id)\n @customer = Client.find_or_create_by(name: fb_user[\"first_name\"], last_name: fb_user[\"last_name\"], picture: fb_user[\"profile_pic\"], sender_id: sender_id)\n end",
"def email=(value)\n self.from_email = value\n end",
"def sender=(cm)\n Ruby.primitive :sendsite_set_sender\n raise PrimitiveFailure, \"primitive failed\"\n end",
"def set_recipient\n end",
"def recipient=(value)\n @recipient = value\n end",
"def sender\n begin\n User.cached_find(self.sender_id)\n rescue\n nil\n end\n end",
"def patch_sender(sender_id, sender_request, opts = {})\n data, _status_code, _headers = patch_sender_with_http_info(sender_id, sender_request, opts)\n data\n end",
"def sent_to_me=(value)\n @sent_to_me = value\n end",
"def recipient=(value)\n @recipient = value\n end",
"def resent_sender=( val )\n header[:resent_sender] = val\n end",
"def sender_user_id\n return @sender_user_id\n end",
"def text_message_default_sender\n current_params.fetch(:from)\n end",
"def smtp_envelope_from( val = nil )\n if val\n self.smtp_envelope_from = val\n else\n @smtp_envelope_from || return_path || sender || from_addrs.first\n end\n end",
"def sender_email\n msg['from_email'] || msg['sender'] || entry['sender'] || reject['sender']\n end",
"def sender\n @payload['sender']\n end",
"def with_sender(sender)\n @http_sender = sender\n self\n end",
"def sender_address\n case from\n when Hash; Hash[*from.first] if from.size > 1\n when Array; from.first if from.size > 1\n end\n end",
"def in_reply_to=(value)\n @in_reply_to = value\n end",
"def to_sender\n if self.sender\n self.sender\n else\n Person.new({\n :gender => self.sender_gender,\n :academic_title_id => self.sender_academic_title_id,\n :first_name => self.sender_first_name,\n :last_name => self.sender_last_name,\n :email => self.sender_email\n })\n end\n end",
"def sender_number\n if self.respond_to? :carrier\n return self[:number]\n else \n return self[:from]\n end\n end",
"def from(email = false)\n @from = email if email\n @from\n end",
"def reply_to=(value)\n @reply_to = value\n end",
"def sender( val = nil )\n default :sender, val\n end",
"def sender\n return @sender\n end",
"def sender\n return @sender\n end",
"def setsender(msgid, sender)\n req(\"setsender\", \"\", sender, msgid, \"\")\n end",
"def sender\n request.from.first.to_s\n end",
"def mail_from_project_sender\n sender = if @issue\n p = @issue.project\n s = CustomField.find_by_name('project-sender-email')\n p.custom_value_for(s).try(:value) if p.present? && s.present?\n end\n (sender.present? && sender) || Setting.mail_from\n end",
"def from=(from)\n write_attr :from, from\n end",
"def mbox_from=(value)\n @mbox_from = value\n end",
"def default_sender_address\n address = Mail::Address.new(Gitlab.config.gitlab.email_from)\n address.display_name = \"GitLab\"\n address\n end",
"def sent_to_or_cc_me=(value)\n @sent_to_or_cc_me = value\n end",
"def recipient(current_user)\n \tself.sender_id == current_user.id ? self.receiver : self.sender\n \tend",
"def originator\n @originator ||= original_message.sender\n end",
"def sender\n @payload[:sender]\n end",
"def sender_message\n return @sender_message\n end",
"def resent_sender( val = nil )\n default :resent_sender, val\n end",
"def recipient_email=(value)\n @recipient_email = value\n end",
"def recipient_email=(value)\n @recipient_email = value\n end",
"def sender\n user = User.find_by_id(sent_by)\n \"#{user.first_name} #{user.last_name}\"\n end",
"def with(current_user)\n \tsender == current_user ? recipient : sender\n\tend",
"def process_mail_from sender\n if @state.include? :mail_from\n @state -= [:mail_from, :rcpt, :data]\n receive_reset\n end\n\n super\n end",
"def sender_name\n @stanza.from.resource\n end",
"def type\n \"Sender\"\n end",
"def configured_sender( sender_type )\n\t\treturn Observability::Sender.create( sender_type ) if sender_type\n\t\treturn Observability::Sender.configured_type\n\tend",
"def set_from\n @from = From.find(params[:id])\n end",
"def process_mail_from sender\n if (@@parms[:starttls]==:required and !@state.include?(:starttls))\n send_data \"550 This server requires STARTTLS before MAIL FROM\\r\\n\"\n elsif (@@parms[:auth]==:required and !@state.include?(:auth))\n send_data \"550 This server requires authentication before MAIL FROM\\r\\n\"\n elsif @state.include?(:mail_from)\n send_data \"503 MAIL already given\\r\\n\"\n else\n unless receive_sender sender\n send_data \"550 sender is unacceptable\\r\\n\"\n else\n send_data \"250 Ok\\r\\n\"\n @state << :mail_from\n end\n end\n end",
"def serialized_sender\n user = User.current_user || donor\n Api::V1::UserSerializer.new(user)\n end",
"def reply_to_sender(delivery, reply_body, subject = nil)\n return reply(delivery.conversation, delivery.message.sender, reply_body, subject)\n end",
"def sent_only_to_me=(value)\n @sent_only_to_me = value\n end",
"def sender\n ENV['NOTIFICATION_FROM_EMAIL'] || 'noreply@some.gov'\n end",
"def set_Receiver(value)\n set_input(\"Receiver\", value)\n end",
"def originator\n @originator = self.original_message.sender if @originator.nil?\n return @originator\n end",
"def from\n @_from || payload && payload['from']\n end",
"def sender(queue_name, opts={}, &blk)\n create(queue_name, :sender, opts, &blk)\n end",
"def set_Receiver(value)\n set_input(\"Receiver\", value)\n end",
"def sender\r\n SenderController.instance\r\n end",
"def reply_to\n (@reply_to || self.from)\n end",
"def sendMail(sender, receiver)\n self.source = sender\n sender.sent << self\n self.target = receiver\n receiver.inbox << self\n self.created_at = DateTime.now\n self.save\n end",
"def sender_date_time=(value)\n @sender_date_time = value\n end",
"def reply_to=( val )\n header[:reply_to] = val\n end",
"def set_sender\n @survivor = Survivor.find(params[:survivor_id])\n @sender = Survivor.find(params[:sender_id])\n\n action_errors\n end",
"def from(from)\n @value[:from] = from\n self\n end",
"def set_interface_sender\n @interface_sender = InterfaceSender.find(params[:id])\n end",
"def primary_smtp_address=(value)\n @primary_smtp_address = value\n end",
"def mail_from\n if @yaml[\"mail\"][\"from\"] != \"example@example.com\"\n return @yaml[\"mail\"][\"from\"]\n end\n end",
"def email=(value)\n reset_agent\n @email = value\n end",
"def src_ip\n self[:sender_ip]\n end",
"def set_InReplyTo(value)\n set_input(\"InReplyTo\", value)\n end",
"def set_originator(message)\n # no originator. For jobs that are only called from within other\n # jobs, there is no need to override this method.\n end",
"def from=(from)\n if from.nil?\n fail ArgumentError, 'invalid value for \"from\", from cannot be nil.'\n end\n @from = from\n end",
"def send_confirmations_to_owner=(value)\n @send_confirmations_to_owner = value\n end",
"def user\n if scope.id == object.sender_user_id\n object.recipient_user\n else\n object.sender_user\n end\n end",
"def set_usr_buyer_property\n @user_contact = UsrContact.find(id = current_usr_contact.id)\n end",
"def set_owner\n @owner = Owner.find_by_owner_user_id(current_owner_user.id)\n end",
"def from=( val )\n header[:from] = val\n end",
"def email_set(address)\n self.email.set address\n end",
"def send_email(recipient, sender)\n @recipient = recipient\n @sender = sender\n mail(to: @recipient.email, subject: 'Wunschliste')\n end",
"def set_originator\n @originator = Originator.find(params[:id])\n end",
"def set_owner #:doc:\n self.owner_class.owner= get_current_owner\n end",
"def from(value)\n @from = value\n @options[:from] = value\n self\n end",
"def from=(value)\n @from = value\n end",
"def from=(value)\n @from = value\n end",
"def sent_to_addresses=(value)\n @sent_to_addresses = value\n end",
"def set_author(name, email=nil)\n self.actor = name ? Grit::Actor.new(name, (email ? email : '')) : nil\n end"
] | [
"0.81107867",
"0.81107867",
"0.76451635",
"0.7347496",
"0.7240935",
"0.72400665",
"0.7126398",
"0.7087838",
"0.70114356",
"0.68866825",
"0.67442554",
"0.6702892",
"0.66146463",
"0.64597374",
"0.6443462",
"0.6348953",
"0.62901735",
"0.6285501",
"0.62735003",
"0.6270729",
"0.62542737",
"0.6245615",
"0.6216492",
"0.62077755",
"0.62064046",
"0.6203553",
"0.61833614",
"0.61170435",
"0.60280985",
"0.6009476",
"0.5991464",
"0.5965181",
"0.5952897",
"0.5949556",
"0.5917319",
"0.58952945",
"0.58764833",
"0.5820023",
"0.5820023",
"0.58110267",
"0.5801763",
"0.58000207",
"0.57815963",
"0.57698387",
"0.57694167",
"0.5764514",
"0.5733423",
"0.5729238",
"0.5655948",
"0.5645988",
"0.56303596",
"0.5601155",
"0.5601155",
"0.5587468",
"0.55853415",
"0.55361575",
"0.55214393",
"0.5499044",
"0.54935426",
"0.54838717",
"0.548152",
"0.5473016",
"0.54635435",
"0.5456088",
"0.54515284",
"0.5435809",
"0.5427518",
"0.54026026",
"0.5399869",
"0.5398756",
"0.5377612",
"0.53772885",
"0.53528476",
"0.53501505",
"0.53451324",
"0.53250766",
"0.53107977",
"0.5303491",
"0.5295757",
"0.52942675",
"0.5285591",
"0.52605754",
"0.5249626",
"0.5232484",
"0.52320343",
"0.51816875",
"0.51719916",
"0.51621276",
"0.51595044",
"0.5158752",
"0.5156987",
"0.5153105",
"0.51459974",
"0.51400715",
"0.51328176",
"0.51233524",
"0.51233524",
"0.5118088",
"0.51139265"
] | 0.78481275 | 3 |
Serializes information about the current object | def serialize(writer)
raise StandardError, 'writer cannot be null' if writer.nil?
super
writer.write_collection_of_object_values("attachments", @attachments)
writer.write_object_value("body", @body)
writer.write_string_value("conversationId", @conversation_id)
writer.write_string_value("conversationThreadId", @conversation_thread_id)
writer.write_collection_of_object_values("extensions", @extensions)
writer.write_object_value("from", @from)
writer.write_boolean_value("hasAttachments", @has_attachments)
writer.write_object_value("inReplyTo", @in_reply_to)
writer.write_collection_of_object_values("multiValueExtendedProperties", @multi_value_extended_properties)
writer.write_collection_of_object_values("newParticipants", @new_participants)
writer.write_date_time_value("receivedDateTime", @received_date_time)
writer.write_object_value("sender", @sender)
writer.write_collection_of_object_values("singleValueExtendedProperties", @single_value_extended_properties)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def serialize\n end",
"def serialize(object) end",
"def serialize; end",
"def serialize; end",
"def serialize\n \n end",
"def serialize\n raise NotImplementedError\n end",
"def serialize\n raise NotImplementedError\n end",
"def dump\r\n super + to_s\r\n end",
"def serialize\n self.to_hash.to_json\n end",
"def serialized\n serializer_class.new(self).serializable_hash\n end",
"def serialize\n @raw_data\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"details\", @details)\n writer.write_string_value(\"identityType\", @identity_type)\n end",
"def serialize\n @serializer.serialize(self.output)\n end",
"def serialize(_object, data); end",
"def serialize(_object, data); end",
"def serializer; end",
"def to_json\n\t\t\tself.instance_variable_hash\n\t\tend",
"def serialize!\n end",
"def serialize(object)\n object.serializable_hash\n end",
"def serialize(object)\n object.to_s\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_object_value(\"device\", @device)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_enum_value(\"keyStrength\", @key_strength)\n end",
"def marshal\n Marshal.dump self\n end",
"def marshal\n Marshal.dump self\n end",
"def marshal\n Marshal.dump self\n end",
"def inspect\n serialize.to_s\n end",
"def serialize\n YAML::dump(self)\n end",
"def inspect()\n serialize.to_s()\n end",
"def inspect()\n serialize.to_s()\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"accessPackage\", @access_package)\n writer.write_collection_of_object_values(\"answers\", @answers)\n writer.write_object_value(\"assignment\", @assignment)\n writer.write_date_time_value(\"completedDateTime\", @completed_date_time)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_collection_of_object_values(\"customExtensionCalloutInstances\", @custom_extension_callout_instances)\n writer.write_enum_value(\"requestType\", @request_type)\n writer.write_object_value(\"requestor\", @requestor)\n writer.write_object_value(\"schedule\", @schedule)\n writer.write_enum_value(\"state\", @state)\n writer.write_string_value(\"status\", @status)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"initiator\", @initiator)\n writer.write_collection_of_object_values(\"members\", @members)\n writer.write_date_time_value(\"visibleHistoryStartDateTime\", @visible_history_start_date_time)\n end",
"def inspect\n fields = serializable_hash.map { |k, v| \"#{k}=#{v}\" }\n \"#<#{self.class.name}:#{object_id} #{fields.join(' ')}>\"\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_primitive_values(\"aliases\", @aliases)\n writer.write_collection_of_object_values(\"countriesOrRegionsOfOrigin\", @countries_or_regions_of_origin)\n writer.write_object_value(\"description\", @description)\n writer.write_date_time_value(\"firstActiveDateTime\", @first_active_date_time)\n writer.write_collection_of_object_values(\"indicators\", @indicators)\n writer.write_enum_value(\"kind\", @kind)\n writer.write_object_value(\"summary\", @summary)\n writer.write_collection_of_primitive_values(\"targets\", @targets)\n writer.write_string_value(\"title\", @title)\n writer.write_object_value(\"tradecraft\", @tradecraft)\n end",
"def serialize(object, data); end",
"def serialize\n JSON.generate(to_h)\n end",
"def serialiaze\n Logger.d(\"Serializing the User object\")\n save_to_shared_prefs(@context, self.class, self)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n writer.write_object_value(\"cost\", @cost)\n writer.write_object_value(\"life\", @life)\n writer.write_object_value(\"per\", @per)\n writer.write_object_value(\"salvage\", @salvage)\n writer.write_additional_data(@additional_data)\n end",
"def inspect\n id_string = (respond_to?(:id) && !id.nil?) ? \" id=#{id}\" : ''\n \"#<#{self.class}:0x#{object_id.to_s(16)}#{id_string}> JSON: \" +\n Clever::JSON.dump(@values, pretty: true)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"owner\", @owner)\n writer.write_collection_of_object_values(\"properties\", @properties)\n writer.write_string_value(\"status\", @status)\n writer.write_collection_of_primitive_values(\"targetTypes\", @target_types)\n end",
"def write\n hash = attributes_hash\n write_value(serializer_class.dump(hash))\n @_cache = hash # set @_cache after the write\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"appDisplayName\", @app_display_name)\n writer.write_string_value(\"dataType\", @data_type)\n writer.write_boolean_value(\"isSyncedFromOnPremises\", @is_synced_from_on_premises)\n writer.write_string_value(\"name\", @name)\n writer.write_collection_of_primitive_values(\"targetObjects\", @target_objects)\n end",
"def instance_to_json\n\t\t# byebug\n\t\t{\n\t\tid: self.id,\n\t\tname: self.name,\n\t\theight: self.height,\n\t\tlast_watered: self.last_watered,\n\t\tlast_watered_amount: self.last_watered_amount,\n\t\tgrow_zone: self.grow_zone,\n\t\tnotes: self.notes,\n\t\tplanted_date: self.planted_date,\n\t\tfarm: self.farm,\t\n\t\tsensor: self.sensor\n\t\t# farm: { \n\t\t# \tfarm: self.farm.name,\n\t\t# \tfarm: self.farm.id,\n\t\t# },\n\t\t}\n\tend",
"def _dump(depth)\n scrooge_fetch_remaining\n scrooge_invalidate_updateable_result_set\n scrooge_dump_flag_this\n str = Marshal.dump(self)\n scrooge_dump_unflag_this\n str\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id} #{info}>\"\n end",
"def to_dump\n @time = Time.now\n Base64.encode64(Marshal.dump(self))\n end",
"def dump\n\t\t\t\tflatten!\n\t\t\t\t\n\t\t\t\tMessagePack.dump(@attributes)\n\t\t\tend",
"def inspect\n serialize.to_s\n end",
"def inspect\n serialize.to_s\n end",
"def inspect\n serialize.to_s\n end",
"def serialize(options={})\n raise NotImplementedError, \"Please implement this in your concrete class\"\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"apiConnectorConfiguration\", @api_connector_configuration)\n writer.write_collection_of_object_values(\"identityProviders\", @identity_providers)\n writer.write_collection_of_object_values(\"languages\", @languages)\n writer.write_collection_of_object_values(\"userAttributeAssignments\", @user_attribute_assignments)\n writer.write_collection_of_object_values(\"userFlowIdentityProviders\", @user_flow_identity_providers)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"clientContext\", @client_context)\n writer.write_object_value(\"resultInfo\", @result_info)\n writer.write_enum_value(\"status\", @status)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_number_value(\"memberCount\", @member_count)\n writer.write_collection_of_object_values(\"members\", @members)\n writer.write_enum_value(\"tagType\", @tag_type)\n writer.write_string_value(\"teamId\", @team_id)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_object_value(\"resource\", @resource)\n writer.write_object_value(\"weight\", @weight)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"comment\", @comment)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_date_time_value(\"deletedDateTime\", @deleted_date_time)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_collection_of_object_values(\"history\", @history)\n writer.write_boolean_value(\"hostOnly\", @host_only)\n writer.write_string_value(\"hostOrDomain\", @host_or_domain)\n writer.write_object_value(\"lastModifiedBy\", @last_modified_by)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_string_value(\"path\", @path)\n writer.write_enum_value(\"sourceEnvironment\", @source_environment)\n writer.write_enum_value(\"status\", @status)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"category\", @category)\n writer.write_date_time_value(\"firstSeenDateTime\", @first_seen_date_time)\n writer.write_object_value(\"host\", @host)\n writer.write_date_time_value(\"lastSeenDateTime\", @last_seen_date_time)\n writer.write_string_value(\"name\", @name)\n writer.write_string_value(\"version\", @version)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"large\", @large)\n writer.write_object_value(\"medium\", @medium)\n writer.write_object_value(\"small\", @small)\n writer.write_object_value(\"source\", @source)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"accessPackage\", @access_package)\n writer.write_enum_value(\"allowedTargetScope\", @allowed_target_scope)\n writer.write_object_value(\"automaticRequestSettings\", @automatic_request_settings)\n writer.write_object_value(\"catalog\", @catalog)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_collection_of_object_values(\"customExtensionStageSettings\", @custom_extension_stage_settings)\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_object_value(\"expiration\", @expiration)\n writer.write_date_time_value(\"modifiedDateTime\", @modified_date_time)\n writer.write_collection_of_object_values(\"questions\", @questions)\n writer.write_object_value(\"requestApprovalSettings\", @request_approval_settings)\n writer.write_object_value(\"requestorSettings\", @requestor_settings)\n writer.write_object_value(\"reviewSettings\", @review_settings)\n writer.write_collection_of_object_values(\"specificAllowedTargets\", @specific_allowed_targets)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"assignedTo\", @assigned_to)\n writer.write_date_time_value(\"closedDateTime\", @closed_date_time)\n writer.write_object_value(\"createdBy\", @created_by)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_object_value(\"dataSubject\", @data_subject)\n writer.write_enum_value(\"dataSubjectType\", @data_subject_type)\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_collection_of_object_values(\"history\", @history)\n writer.write_object_value(\"insight\", @insight)\n writer.write_date_time_value(\"internalDueDateTime\", @internal_due_date_time)\n writer.write_object_value(\"lastModifiedBy\", @last_modified_by)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_collection_of_object_values(\"notes\", @notes)\n writer.write_collection_of_primitive_values(\"regulations\", @regulations)\n writer.write_collection_of_object_values(\"stages\", @stages)\n writer.write_enum_value(\"status\", @status)\n writer.write_object_value(\"team\", @team)\n writer.write_enum_value(\"type\", @type)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_string_value(\"deviceId\", @device_id)\n writer.write_string_value(\"key\", @key)\n writer.write_enum_value(\"volumeType\", @volume_type)\n end",
"def serializable_hash\n self.attributes\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"endDateTime\", @end_date_time)\n writer.write_string_value(\"joinWebUrl\", @join_web_url)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_collection_of_object_values(\"modalities\", @modalities)\n writer.write_object_value(\"organizer\", @organizer)\n writer.write_collection_of_object_values(\"participants\", @participants)\n writer.write_collection_of_object_values(\"sessions\", @sessions)\n writer.write_date_time_value(\"startDateTime\", @start_date_time)\n writer.write_enum_value(\"type\", @type)\n writer.write_object_value(\"version\", @version)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"axes\", @axes)\n writer.write_object_value(\"dataLabels\", @data_labels)\n writer.write_object_value(\"format\", @format)\n writer.write_object_value(\"height\", @height)\n writer.write_object_value(\"left\", @left)\n writer.write_object_value(\"legend\", @legend)\n writer.write_string_value(\"name\", @name)\n writer.write_collection_of_object_values(\"series\", @series)\n writer.write_object_value(\"title\", @title)\n writer.write_object_value(\"top\", @top)\n writer.write_object_value(\"width\", @width)\n writer.write_object_value(\"worksheet\", @worksheet)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"description\", @description)\n writer.write_object_value(\"details\", @details)\n writer.write_string_value(\"name\", @name)\n writer.write_enum_value(\"scenarios\", @scenarios)\n end",
"def serialize\n JSON.dump(@hash)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_boolean_value(\"isUsable\", @is_usable)\n writer.write_boolean_value(\"isUsableOnce\", @is_usable_once)\n writer.write_number_value(\"lifetimeInMinutes\", @lifetime_in_minutes)\n writer.write_string_value(\"methodUsabilityReason\", @method_usability_reason)\n writer.write_date_time_value(\"startDateTime\", @start_date_time)\n writer.write_string_value(\"temporaryAccessPass\", @temporary_access_pass)\n end",
"def to_s\r\n dump\r\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"callee\", @callee)\n writer.write_object_value(\"caller\", @caller)\n writer.write_date_time_value(\"endDateTime\", @end_date_time)\n writer.write_object_value(\"failureInfo\", @failure_info)\n writer.write_collection_of_object_values(\"media\", @media)\n writer.write_date_time_value(\"startDateTime\", @start_date_time)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_number_value(\"deviceCount\", @device_count)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_collection_of_object_values(\"managedDevices\", @managed_devices)\n writer.write_enum_value(\"platform\", @platform)\n writer.write_string_value(\"publisher\", @publisher)\n writer.write_object_value(\"sizeInByte\", @size_in_byte)\n writer.write_string_value(\"version\", @version)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_collection_of_object_values(\"members\", @members)\n writer.write_string_value(\"roleTemplateId\", @role_template_id)\n writer.write_collection_of_object_values(\"scopedMembers\", @scoped_members)\n end",
"def serialize(io)\n Encoder.encode(io, self)\n io\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"options\", @options)\n writer.write_boolean_value(\"protected\", @protected)\n end",
"def _dump() end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"authenticationConfiguration\", @authentication_configuration)\n writer.write_object_value(\"clientConfiguration\", @client_configuration)\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_object_value(\"endpointConfiguration\", @endpoint_configuration)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"container\", @container)\n writer.write_string_value(\"containerId\", @container_id)\n writer.write_object_value(\"lastModifiedBy\", @last_modified_by)\n writer.write_object_value(\"member\", @member)\n writer.write_string_value(\"memberId\", @member_id)\n writer.write_enum_value(\"outlierContainerType\", @outlier_container_type)\n writer.write_enum_value(\"outlierMemberType\", @outlier_member_type)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_object_value(\"body\", @body)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_string_value(\"imageUrl\", @image_url)\n writer.write_collection_of_object_values(\"indicators\", @indicators)\n writer.write_boolean_value(\"isFeatured\", @is_featured)\n writer.write_date_time_value(\"lastUpdatedDateTime\", @last_updated_date_time)\n writer.write_object_value(\"summary\", @summary)\n writer.write_collection_of_primitive_values(\"tags\", @tags)\n writer.write_string_value(\"title\", @title)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_date_time_value(\"completedDateTime\", @completed_date_time)\n writer.write_object_value(\"progress\", @progress)\n writer.write_enum_value(\"status\", @status)\n writer.write_string_value(\"storageLocation\", @storage_location)\n writer.write_date_time_value(\"submittedDateTime\", @submitted_date_time)\n writer.write_string_value(\"userId\", @user_id)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"accessPackages\", @access_packages)\n writer.write_enum_value(\"catalogType\", @catalog_type)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_collection_of_object_values(\"customWorkflowExtensions\", @custom_workflow_extensions)\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_boolean_value(\"isExternallyVisible\", @is_externally_visible)\n writer.write_date_time_value(\"modifiedDateTime\", @modified_date_time)\n writer.write_collection_of_object_values(\"resourceRoles\", @resource_roles)\n writer.write_collection_of_object_values(\"resourceScopes\", @resource_scopes)\n writer.write_collection_of_object_values(\"resources\", @resources)\n writer.write_enum_value(\"state\", @state)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"bundles\", @bundles)\n writer.write_string_value(\"driveType\", @drive_type)\n writer.write_collection_of_object_values(\"following\", @following)\n writer.write_collection_of_object_values(\"items\", @items)\n writer.write_object_value(\"list\", @list)\n writer.write_object_value(\"owner\", @owner)\n writer.write_object_value(\"quota\", @quota)\n writer.write_object_value(\"root\", @root)\n writer.write_object_value(\"sharePointIds\", @share_point_ids)\n writer.write_collection_of_object_values(\"special\", @special)\n writer.write_object_value(\"system\", @system)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_enum_value(\"classification\", @classification)\n writer.write_string_value(\"feature\", @feature)\n writer.write_string_value(\"featureGroup\", @feature_group)\n writer.write_string_value(\"impactDescription\", @impact_description)\n writer.write_boolean_value(\"isResolved\", @is_resolved)\n writer.write_enum_value(\"origin\", @origin)\n writer.write_collection_of_object_values(\"posts\", @posts)\n writer.write_string_value(\"service\", @service)\n writer.write_enum_value(\"status\", @status)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"connectors\", @connectors)\n writer.write_boolean_value(\"hasPhysicalDevice\", @has_physical_device)\n writer.write_boolean_value(\"isShared\", @is_shared)\n writer.write_date_time_value(\"lastSeenDateTime\", @last_seen_date_time)\n writer.write_date_time_value(\"registeredDateTime\", @registered_date_time)\n writer.write_collection_of_object_values(\"shares\", @shares)\n writer.write_collection_of_object_values(\"taskTriggers\", @task_triggers)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"assignments\", @assignments)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_string_value(\"description\", @description)\n writer.write_collection_of_object_values(\"deviceStates\", @device_states)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_string_value(\"informationUrl\", @information_url)\n writer.write_object_value(\"installSummary\", @install_summary)\n writer.write_object_value(\"largeCover\", @large_cover)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_string_value(\"privacyInformationUrl\", @privacy_information_url)\n writer.write_date_time_value(\"publishedDateTime\", @published_date_time)\n writer.write_string_value(\"publisher\", @publisher)\n writer.write_collection_of_object_values(\"userStateSummary\", @user_state_summary)\n end",
"def inspect\n attributes = [\n \"name=#{name.inspect}\",\n \"key=#{key.inspect}\",\n \"data_type=#{data_type.inspect}\",\n ]\n \"#<#{self.class.name}:#{object_id} #{attributes.join(', ')}>\"\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"assignments\", @assignments)\n writer.write_collection_of_object_values(\"categories\", @categories)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_string_value(\"description\", @description)\n writer.write_string_value(\"developer\", @developer)\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_string_value(\"informationUrl\", @information_url)\n writer.write_boolean_value(\"isFeatured\", @is_featured)\n writer.write_object_value(\"largeIcon\", @large_icon)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_string_value(\"notes\", @notes)\n writer.write_string_value(\"owner\", @owner)\n writer.write_string_value(\"privacyInformationUrl\", @privacy_information_url)\n writer.write_string_value(\"publisher\", @publisher)\n writer.write_enum_value(\"publishingState\", @publishing_state)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_enum_value(\"platformType\", @platform_type)\n writer.write_number_value(\"settingCount\", @setting_count)\n writer.write_collection_of_object_values(\"settingStates\", @setting_states)\n writer.write_enum_value(\"state\", @state)\n writer.write_number_value(\"version\", @version)\n end",
"def _dump()\n #This is a stub, used for indexing\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"displayName\", @display_name)\n writer.write_string_value(\"templateId\", @template_id)\n writer.write_collection_of_object_values(\"values\", @values)\n end",
"def marshal_dump\n { \n :klass => self.class.to_s, \n :values => @attribute_values_flat, \n :joined => @joined_models\n }\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"containers\", @containers)\n writer.write_object_value(\"controller\", @controller)\n writer.write_collection_of_object_values(\"ephemeralContainers\", @ephemeral_containers)\n writer.write_collection_of_object_values(\"initContainers\", @init_containers)\n writer.write_object_value(\"labels\", @labels)\n writer.write_string_value(\"name\", @name)\n writer.write_object_value(\"namespace\", @namespace)\n writer.write_object_value(\"podIp\", @pod_ip)\n writer.write_object_value(\"serviceAccount\", @service_account)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_enum_value(\"detectionStatus\", @detection_status)\n writer.write_object_value(\"imageFile\", @image_file)\n writer.write_string_value(\"mdeDeviceId\", @mde_device_id)\n writer.write_date_time_value(\"parentProcessCreationDateTime\", @parent_process_creation_date_time)\n writer.write_object_value(\"parentProcessId\", @parent_process_id)\n writer.write_object_value(\"parentProcessImageFile\", @parent_process_image_file)\n writer.write_string_value(\"processCommandLine\", @process_command_line)\n writer.write_date_time_value(\"processCreationDateTime\", @process_creation_date_time)\n writer.write_object_value(\"processId\", @process_id)\n writer.write_object_value(\"userAccount\", @user_account)\n end",
"def inspect\n self.to_hash.inspect\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_collection_of_object_values(\"administrativeUnits\", @administrative_units)\n writer.write_collection_of_object_values(\"attributeSets\", @attribute_sets)\n writer.write_collection_of_object_values(\"customSecurityAttributeDefinitions\", @custom_security_attribute_definitions)\n writer.write_collection_of_object_values(\"deletedItems\", @deleted_items)\n writer.write_collection_of_object_values(\"federationConfigurations\", @federation_configurations)\n writer.write_collection_of_object_values(\"onPremisesSynchronization\", @on_premises_synchronization)\n end",
"def inspect\n \"#<#{self.class}:0x#{object_id.to_s(16)}> JSON: \" +\n JSON.pretty_generate(@data)\n end",
"def encode\n raise Errors::SerializerNotConfigured if serializer_missing?\n\n serializer.encode(self)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n super\n writer.write_string_value(\"activationUrl\", @activation_url)\n writer.write_string_value(\"activitySourceHost\", @activity_source_host)\n writer.write_string_value(\"appActivityId\", @app_activity_id)\n writer.write_string_value(\"appDisplayName\", @app_display_name)\n writer.write_object_value(\"contentInfo\", @content_info)\n writer.write_string_value(\"contentUrl\", @content_url)\n writer.write_date_time_value(\"createdDateTime\", @created_date_time)\n writer.write_date_time_value(\"expirationDateTime\", @expiration_date_time)\n writer.write_string_value(\"fallbackUrl\", @fallback_url)\n writer.write_collection_of_object_values(\"historyItems\", @history_items)\n writer.write_date_time_value(\"lastModifiedDateTime\", @last_modified_date_time)\n writer.write_enum_value(\"status\", @status)\n writer.write_string_value(\"userTimezone\", @user_timezone)\n writer.write_object_value(\"visualElements\", @visual_elements)\n end",
"def serialize\n super(ATTR_NAME_ARY)\n end",
"def serialize\n super(ATTR_NAME_ARY)\n end",
"def serialize\n super(ATTR_NAME_ARY)\n end",
"def serialize\n super(ATTR_NAME_ARY)\n end",
"def serialize\n super(ATTR_NAME_ARY)\n end",
"def serialize(writer)\n raise StandardError, 'writer cannot be null' if writer.nil?\n writer.write_object_value(\"basis\", @basis)\n writer.write_object_value(\"cost\", @cost)\n writer.write_object_value(\"datePurchased\", @date_purchased)\n writer.write_object_value(\"firstPeriod\", @first_period)\n writer.write_object_value(\"period\", @period)\n writer.write_object_value(\"rate\", @rate)\n writer.write_object_value(\"salvage\", @salvage)\n writer.write_additional_data(@additional_data)\n end",
"def serialize(writer) \n super\n writer.write_collection_of_primitive_values(\"categories\", @categories)\n writer.write_string_value(\"changeKey\", @change_key)\n writer.write_date_value(\"createdDateTime\", @created_date_time)\n writer.write_date_value(\"lastModifiedDateTime\", @last_modified_date_time)\n end"
] | [
"0.79519033",
"0.76465106",
"0.75808734",
"0.75808734",
"0.7441225",
"0.7209669",
"0.7209669",
"0.720668",
"0.701747",
"0.7002693",
"0.69936407",
"0.6983181",
"0.6972398",
"0.69675887",
"0.69675887",
"0.6942931",
"0.69420123",
"0.6935524",
"0.69145155",
"0.6891068",
"0.68803483",
"0.6877558",
"0.6877558",
"0.6877558",
"0.6875324",
"0.68525094",
"0.68368477",
"0.68368477",
"0.6825207",
"0.6814586",
"0.6805507",
"0.6800522",
"0.67955524",
"0.6773115",
"0.6734066",
"0.6728008",
"0.66959995",
"0.66823745",
"0.6673096",
"0.66678214",
"0.66670305",
"0.6659405",
"0.6655963",
"0.66511476",
"0.6635537",
"0.662825",
"0.662825",
"0.662825",
"0.6627648",
"0.6616259",
"0.6613713",
"0.6610548",
"0.6596648",
"0.6596455",
"0.6594039",
"0.6591663",
"0.65837413",
"0.65800667",
"0.6580038",
"0.6576505",
"0.6571032",
"0.65701735",
"0.6566823",
"0.656081",
"0.6555125",
"0.65403765",
"0.65372765",
"0.65342295",
"0.65266466",
"0.65248185",
"0.65241057",
"0.6523422",
"0.6517231",
"0.65129846",
"0.6507672",
"0.6499402",
"0.6498554",
"0.6496918",
"0.6486062",
"0.64815867",
"0.6481207",
"0.6479458",
"0.64761055",
"0.64750475",
"0.64679796",
"0.6462904",
"0.6462426",
"0.6460337",
"0.645633",
"0.6448957",
"0.64389867",
"0.6433835",
"0.64322424",
"0.6424561",
"0.6421094",
"0.6421094",
"0.6421094",
"0.6421094",
"0.6421094",
"0.641823",
"0.64144397"
] | 0.0 | -1 |
Gets the singleValueExtendedProperties property value. The collection of singlevalue extended properties defined for the post. Readonly. Nullable. | def single_value_extended_properties
return @single_value_extended_properties
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def single_value_extended_properties=(value)\n @single_value_extended_properties = value\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def value\n if allows_nil? && properties.values.all?(&:nil?)\n nil\n else\n Hash[properties.map { |key, value| [key, value.value] }]\n end\n end",
"def extension_properties\n return @extension_properties\n end",
"def single_value?\n @single_value\n end",
"def additional_properties\n @additional_properties.nil? ? true : @additional_properties\n end",
"def get_boolean(property, data, single: true)\n get_property(property, data, single: single) do |value, _type|\n value ? true : false\n end\n end",
"def properties\n return @values['properties'] if @values.key?('properties')\n @values['properties'] = {}\n @values['properties']\n end",
"def value\n @property_hash[:value]\n end",
"def field_extras\r\n return @field_extras\r\n end",
"def get_properties()\n return @properties\n end",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def customized_properties\n current_resource.info['properties'].select do |_k, v|\n v['is_set'] == true\n end\n end",
"def parse_extended_properties(extended_properties) # :nodoc\n query_parts = []\n ['shared', 'private'].each do |prop_type|\n next unless extended_properties[prop_type]\n query_parts << extended_properties[prop_type].map {|key, value| (prop_type == \"shared\" ? \"sharedExtendedProperty=\" : \"privateExtendedProperty=\") + \"#{key}%3D#{value}\" }.join(\"&\")\n end\n query_parts.join('&')\n end",
"def nullable\n self['nullable']\n end",
"def payload_extended_options\n opts['Payload'] ? opts['Payload']['ExtendedOptions'] : nil\n end",
"def get_raw_property_value(name)\n return @property_values[name]\n end",
"def value_type?\n @property.value_type?\n end",
"def values_for_properties; end",
"def extension_attribute1\n return @extension_attribute1\n end",
"def get_additionals\n return Additional.find(:all, :conditions => [\"type_id = ? AND owner = ?\", self.id, 'property'])\n end",
"def single_value?\n self[:value] ||= {}\n is_check_box_type?(/^single$/)\n end",
"def as_extended_json(**options)\n if options[:mode] == :relaxed || options[:mode] == :legacy\n value\n else\n {'$numberInt' => value.to_s}\n end\n end",
"def property_properties\n _property_properties\n end",
"def get_property(property, data, is_url: false, single: true, &block)\n values = data ? data[property] : nil\n if values.is_a?(Array)\n values = values.map do |value|\n get_property_value(value, is_url: is_url, &block)\n end\n single ? values[0] : values\n else\n value = get_property_value(values, is_url: is_url, &block)\n single ? value : [value]\n end\n end",
"def single_data_value?\n RECORD_INFO[type].single_data_value?\n end",
"def primary_property\n pp = self.variant_properties.find_by(primary: true)\n pp ? pp : self.variant_properties.first\n end",
"def master_property\n @data[\"master_property\"]\n end",
"def properties\n return @properties\n end",
"def multi_valued?\n @multi_valued\n end",
"def primitive\n @property_hash[:primitive]\n end",
"def form_object_value\n if @field_name.to_s.start_with?('extcol_')\n (@page_config.form_object[:extended_columns] || {})[@field_name.to_s.delete_prefix('extcol_')]\n elsif @field_config[:parent_field]\n parent_hash[@field_name.to_s] || parent_hash[@field_name]\n else\n @page_config.form_object[@field_name]\n end\n end",
"def custom_extension_stage_instance_detail\n return @custom_extension_stage_instance_detail\n end",
"def get_property(property)\n get_compound_value(get_value(property))\n end",
"def modified_properties\n return @modified_properties\n end",
"def data_value()\n (single_data_value?) ? @data.value[0] : @data.value\n end",
"def properties\n self.persistent_class.property_iterator.to_a.inject({}) do |h, value|\n if !value.respond_to?(:getRubyValue)\n h[value.name] = value\n end\n h\n end\n end",
"def value\n self['value']\n end",
"def get_value(property)\n if @env_properties.get_value(property)\n return @env_properties.get_value(property)\n end\n @file_properties.get_value(property)\n end",
"def get_multi_post_attributes_1(key, value)\n a = JSON.parse(self.class.post('/postcodes', body:{ \"postcodes\": postcodes_arr}).body)\n a[\"#{key}\"][0][\"#{key}\"][\"#{value}\"]\n end",
"def value_raw\n @value\n end",
"def properties\n self.values\n end",
"def normalize_scalar_property_value(value)\n return \"NaN\" if value.kind_of?(Float) && value.nan?\n\n case value\n when true, false, nil then value\n when ActiveSupport::Duration then value.to_i\n when Numeric then value\n when String then value.strip\n when Symbol then value.to_s.strip\n when Time then value.getutc.strftime(\"%Y-%m-%dT%H:%M:%S\")\n when IPAddr then value.to_s\n when FLOAT_INFINITY then \"+infinity\"\n when -FLOAT_INFINITY then \"-infinity\"\n when Array then\n out = value.map { |e| normalize_scalar_property_value(e) }\n out = :invalid_property_value if out.detect { |e| e == :invalid_property_value }\n out\n else :invalid_property_value\n end\n end",
"def value\n attributes.fetch(:value)\n end",
"def get_property(property, data, uri = nil, is_url: false, single: true, &block)\n values = data ? data[property] : nil\n if values.is_a?(Array)\n values = values.map { |value| get_property_value(value, is_url: is_url, &block) }\n single ? values[0] : values\n else\n value = get_property_value(values, is_url: is_url, &block)\n single ? value : [value]\n end\n end",
"def get_field_deserializers()\n return super.merge({\n \"displayName\" => lambda {|n| @display_name = n.get_string_value() },\n \"templateId\" => lambda {|n| @template_id = n.get_string_value() },\n \"values\" => lambda {|n| @values = n.get_collection_of_object_values(lambda {|pn| MicrosoftGraph::Models::SettingValue.create_from_discriminator_value(pn) }) },\n })\n end",
"def prodid\n prodid_property ? prodid_property.ruby_value : nil\n end",
"def single_value?\n return false\n end",
"def properties\n @properties ||= {\n size: Integer.new(@value.size),\n length: Integer.new(@value.size),\n empty?: Boolean.new(@value.size.zero?),\n first: @value.first,\n head: @value.first,\n last: @value.last,\n rest: List.new(@value.rest),\n tail: List.new(@value.tail),\n }\n end",
"def property\n @property\n end",
"def extension_attribute13\n return @extension_attribute13\n end",
"def get_date(property, data, single: true)\n get_property(property, data, single: single) do |value, _type|\n DateTime.parse(value)\n end\n end",
"def added_properties\n\t\treturn @added_properties unless @added_properties.nil?\n\t\tcalculate_changes\n\t\treturn @added_properties\n\tend",
"def extension_attribute7\n return @extension_attribute7\n end",
"def property_converter\n FedoraValue\n end",
"def custom_fields_response\n data[:properties].custom_field_property do\n key :type, \"array\"\n key :xml, wrapped: true\n items do\n key :\"$ref\", \"CustomFieldValueApiResponse\"\n end\n end\n end",
"def extras\n @extras\n end",
"def get_attr_value_from_ext_attr( m_name, args )\n \n if self.new_record?\n \n value = nil\n \n self.extended_attributes.each do |extended_attr|\n value = ( extended_attr.value.blank? ? nil : extended_attr.value ) if( extended_attr.attr_name == m_name )\n end\n value\n \n else\n extended_attribute = find_in_extended_attr( m_name )\n extended_attribute.value if extended_attribute\n end\n \n end",
"def get_field_value(field)\n field_values = get_fields(field)\n return nil if field_values.blank?\n if field_values.first.delegation_field.multiple\n field_values.map(&:to_value)\n else\n field_values.first.to_value\n end\n end",
"def custom_properties\n custom_attribute_names\n end",
"def properties\n { 'object_type' => 'array', 'elements' => @elements.map(&:properties) }\n end",
"def value_type\n return @value_type\n end",
"def properties\n model.properties\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end",
"def additional_data\n return @additional_data\n end"
] | [
"0.7429581",
"0.7429581",
"0.7429581",
"0.73794264",
"0.73794264",
"0.73794264",
"0.64755225",
"0.64755225",
"0.64755225",
"0.5374968",
"0.5173842",
"0.5150889",
"0.5003208",
"0.50031954",
"0.49146128",
"0.4742407",
"0.47397512",
"0.4735001",
"0.4692821",
"0.46807307",
"0.4680027",
"0.46755686",
"0.46675697",
"0.46458265",
"0.463563",
"0.46073818",
"0.45935845",
"0.458773",
"0.45775053",
"0.456082",
"0.45338717",
"0.45173123",
"0.4484432",
"0.44675562",
"0.44425714",
"0.44180638",
"0.44107732",
"0.4399853",
"0.4391133",
"0.43896008",
"0.43886536",
"0.43751472",
"0.43696594",
"0.4360419",
"0.43582794",
"0.43532726",
"0.43461427",
"0.43424967",
"0.43420252",
"0.43144733",
"0.43125886",
"0.43107828",
"0.43068767",
"0.43064955",
"0.43064263",
"0.43034714",
"0.42986685",
"0.42896837",
"0.428396",
"0.42808062",
"0.42729145",
"0.42665383",
"0.42632464",
"0.42624184",
"0.42616662",
"0.42596722",
"0.42556527",
"0.42536706",
"0.42453626",
"0.42451805",
"0.42393973",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456",
"0.42385456"
] | 0.81328946 | 2 |
Sets the singleValueExtendedProperties property value. The collection of singlevalue extended properties defined for the post. Readonly. Nullable. | def single_value_extended_properties=(value)
@single_value_extended_properties = value
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def multi_value_extended_properties=(value)\n @multi_value_extended_properties = value\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def single_value_extended_properties\n return @single_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def multi_value_extended_properties\n return @multi_value_extended_properties\n end",
"def extension_properties=(value)\n @extension_properties = value\n end",
"def value=(value)\n\t\tself.updated_at = Time.now\n\t\tif is_multi_object?\n\t\t\t(@value ||= []) << value\n\t\telse\n\t\t\t@value = value\n\t\tend\n\tend",
"def extension_attribute1=(value)\n @extension_attribute1 = value\n end",
"def custom_extension_stage_instance_detail=(value)\n @custom_extension_stage_instance_detail = value\n end",
"def multivalued=(value)\n @multivalued = value\n end",
"def extended_ingredients=(extended_ingredients)\n if !extended_ingredients.nil? && extended_ingredients.length < 0\n fail ArgumentError, 'invalid value for \"extended_ingredients\", number of items must be greater than or equal to 0.'\n end\n\n @extended_ingredients = extended_ingredients\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def set_PopulateProperties(value)\n set_input(\"PopulateProperties\", value)\n end",
"def set_boolean_value\n @values = [Value.new(@context, true)]\n end",
"def properties=(value)\n if value == @defaults['properties']\n @values.delete 'properties' if @values.key? 'properties'\n else\n @values['properties'] = value\n end\n end",
"def properties=(value)\n @properties = value\n end",
"def setExtractMetaTags(value)\n @fields['extract_meta_tags'] = value\n self\n end",
"def single_value?\n @single_value\n end",
"def method_missing(method, *params)\n property = method.to_s\n if property =~ /(.*)=$/ # It's a value assignment\n @newvalues ||= []\n @newvalues << $1\n @properties[$1] = clean_value(params[0])\n else\n if @properties.has_key?(property)\n if self.class.boolean_fields.include?(property)\n (@properties[property] == 1)\n else\n @properties[property]\n end\n else\n super\n end\n end\n end",
"def small=(value)\n @small = value\n end",
"def properties_value_params\r\n params.fetch(:properties_value, {}).permit(:value, :property_id, :is_show_website)\r\n end",
"def modified_properties=(value)\n @modified_properties = value\n end",
"def generic_objects=(value)\n unless value.nil?\n self[:generic_objects] = value.map{|generic_object| Calculated::Models::GenericObject.new(generic_object)}\n end\n end",
"def custom_workflow_extensions=(value)\n @custom_workflow_extensions = value\n end",
"def custom_extension_stage_settings=(value)\n @custom_extension_stage_settings = value\n end",
"def parse_extended_properties(extended_properties) # :nodoc\n query_parts = []\n ['shared', 'private'].each do |prop_type|\n next unless extended_properties[prop_type]\n query_parts << extended_properties[prop_type].map {|key, value| (prop_type == \"shared\" ? \"sharedExtendedProperty=\" : \"privateExtendedProperty=\") + \"#{key}%3D#{value}\" }.join(\"&\")\n end\n query_parts.join('&')\n end",
"def extension_attribute5=(value)\n @extension_attribute5 = value\n end",
"def extension_attribute13=(value)\n @extension_attribute13 = value\n end",
"def extension_attribute6=(value)\n @extension_attribute6 = value\n end",
"def single_value?\n self[:value] ||= {}\n is_check_box_type?(/^single$/)\n end",
"def medium=(value)\n @medium = value\n end",
"def single_pctl_enable=(single_pctl_enable)\n validator = EnumAttributeValidator.new('String', [\"platform-default\", \"No\", \"Yes\"])\n unless validator.valid?(single_pctl_enable)\n fail ArgumentError, \"invalid value for \\\"single_pctl_enable\\\", must be one of #{validator.allowable_values}.\"\n end\n @single_pctl_enable = single_pctl_enable\n end",
"def formatted_value=(obj)\n @formatted_value = obj\n end",
"def value=(value)\n @object.instance_variable_set(:\"@#{@name}\",coerce(value))\n end",
"def normalize_scalar_property_value(value)\n return \"NaN\" if value.kind_of?(Float) && value.nan?\n\n case value\n when true, false, nil then value\n when ActiveSupport::Duration then value.to_i\n when Numeric then value\n when String then value.strip\n when Symbol then value.to_s.strip\n when Time then value.getutc.strftime(\"%Y-%m-%dT%H:%M:%S\")\n when IPAddr then value.to_s\n when FLOAT_INFINITY then \"+infinity\"\n when -FLOAT_INFINITY then \"-infinity\"\n when Array then\n out = value.map { |e| normalize_scalar_property_value(e) }\n out = :invalid_property_value if out.detect { |e| e == :invalid_property_value }\n out\n else :invalid_property_value\n end\n end",
"def extension_attribute7=(value)\n @extension_attribute7 = value\n end",
"def nullable=(val)\n self['nullable'] = val\n end",
"def check_single_value(field, params, default_value = nil)\n @properties[field] = params[field] || default_value\n end",
"def set_value(object, value)\n value = cast_value(value)\n\n case value_type\n when 'integer' then object.value_integer = value\n when 'float' then object.value_float = value\n when 'string' then object.value_string = value\n else raise_invalid_type\n end\n end",
"def update_property_details(property_tags)\n # TODO Might be possible to remove and not needed if the surroundings and descriptions forms always are populated\n if property_tags.blank?\n return\n end\n\n property_details = self.property_details\n\n # Add the property details set to true\n property_tags.each do |tag, keep|\n if keep == \"true\"\n property_details.push(tag)\n end\n end\n\n # Remove all duplicate tags\n property_details = property_details.uniq\n # Remove nils\n property_details.delete_if {|tag| tag.nil?}\n\n # Remove the property details set to false\n property_tags.each do |tag, keep|\n if keep == \"false\"\n property_details.delete(tag)\n end\n end\n self.set(:property_details, property_details)\n end",
"def handle_single_values\n @bytecount = @extentinfo[:extent][:bytecount].to_i || 0\n @byteoffset = @extentinfo[:extent][:byteoffset].to_i || 0\n @startblock = @extentinfo[:extent][:startblock].to_i || 0\n @fileoffset = @extentinfo[:extent][:fileoffset].to_i || 0\n @partition = @extentinfo[:extent][:partition].to_i || 'b' \n end",
"def extensions=(value)\n subtags = Array(value).flatten\n self.extensions_sequence = subtags.empty? ? nil : subtags.join(HYPHEN)\n end",
"def set_custom_property(name, value, type = nil)\n # Valid types.\n valid_type = {\n 'text' => 1,\n 'date' => 1,\n 'number' => 1,\n 'number_int' => 1,\n 'bool' => 1\n }\n\n raise \"The name and value parameters must be defined in set_custom_property()\" if !name || (type != 'bool' && !value)\n\n # Determine the type for strings and numbers if it hasn't been specified.\n unless ptrue?(type)\n type = if value =~ /^\\d+$/\n 'number_int'\n elsif value =~\n /^([+-]?)(?=[0-9]|\\.[0-9])[0-9]*(\\.[0-9]*)?([Ee]([+-]?[0-9]+))?$/\n 'number'\n else\n 'text'\n end\n end\n\n # Check for valid validation types.\n raise \"Unknown custom type '$type' in set_custom_property()\" unless valid_type[type]\n\n # Check for strings longer than Excel's limit of 255 chars.\n raise \"Length of text custom value '$value' exceeds Excel's limit of 255 in set_custom_property()\" if type == 'text' && value.length > 255\n\n if type == 'bool'\n value = value ? 1 : 0\n end\n\n @custom_properties << [name, value, type]\n end",
"def set_properties(*values)\n if values.size == 1 && values.first.is_a?(Hash)\n # We are dealing with a hash\n values.first.each do |key, index|\n self.set_property(key, value)\n end\n else\n # We are dealing with a list/array\n values.flatten.each_with_index do |value, index|\n self.set_property(index, value)\n end\n end\n end",
"def elastic_x(bool = nil)\n return @properties[\"elasticX\"] if bool == nil\n @properties[\"elasticX\"] = bool\n return self\n end",
"def custom_extension_callout_instances=(value)\n @custom_extension_callout_instances = value\n end",
"def as_extended_json(**options)\n if options[:mode] == :relaxed || options[:mode] == :legacy\n value\n else\n {'$numberInt' => value.to_s}\n end\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end",
"def additional_data=(value)\n @additional_data = value\n end"
] | [
"0.71901274",
"0.71901274",
"0.71901274",
"0.62671053",
"0.62671053",
"0.62671053",
"0.5506882",
"0.5506882",
"0.5506882",
"0.5151416",
"0.4680786",
"0.45876145",
"0.43944556",
"0.4372095",
"0.43574515",
"0.4357376",
"0.4357376",
"0.43562883",
"0.43423206",
"0.42975816",
"0.42538726",
"0.4246663",
"0.42430288",
"0.4234759",
"0.42309365",
"0.42282942",
"0.4218303",
"0.4213203",
"0.4208929",
"0.4199227",
"0.41577137",
"0.41379726",
"0.4103154",
"0.40997684",
"0.40957555",
"0.40872195",
"0.40785742",
"0.40747485",
"0.4071573",
"0.40662456",
"0.40633047",
"0.40451366",
"0.40345606",
"0.40227228",
"0.4016099",
"0.40155336",
"0.40129277",
"0.40103912",
"0.3998712",
"0.39881584",
"0.3982965",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522",
"0.3974522"
] | 0.81243765 | 2 |
End of SE Segment Details Start of N3_PR Segment Details | def payer_address_two(*options)
@payer.address_two.to_s.strip.upcase
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def end_line\n attributes.fetch(:endLine)\n end",
"def segments; end",
"def segment_0; segments[0]; end",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def output_ris_end\n \"\"\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def sub_sector; end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def pos_rseg_header\n pos_page_body\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def segment\n return asset_on_segment_history.segment rescue nil\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def end\n attributes.fetch(:end)\n end",
"def end_pos; end",
"def end_pos; end",
"def snr\n part.nr\n end",
"def end_section\n end",
"def ipn_end_point; end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def start_line_number; end",
"def start_line_number; end",
"def involved_segment\n case position % 3\n when 0 then :front\n when 1 then :middle\n when 2 then :back\n end\n end",
"def metadata_start\n 2\n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def sub_hl\n end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def sn\n end",
"def stop\n return self.seq_region_end\n end",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def segments=(_arg0); end",
"def find_page_end\n \n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def ld_hl_sp_plusr8\n end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def paragraph_end; end",
"def pan_last4\r\n params['panLast4']\r\n end",
"def end(p0) end",
"def ipn_end_point=(_arg0); end",
"def span_start; end",
"def segment(index)\n \traise NotImplementedError\n end",
"def end_line kind\n end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def dynamic_segment\n raise(NotImplementedError)\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def to_s\n @segment\n end",
"def start_line\n attributes.fetch(:startLine)\n end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def claim_loop\n segments = []\n @eobs.each_with_index do |eob, index|\n @check_grouper.last_eob = eob\n @check = eob.check_information\n @job = @check.job\n if @check.micr_line_information && @check.micr_line_information.payer && @facility.details[:micr_line_info]\n @payer = @check.micr_line_information.payer\n else\n @payer = @check.payer\n end\n @facility_output_config = @facility.output_config(@payer.payer_type)\n @reason_codes = nil #this variable is used in child class for configurable section\n @eob = eob\n @claim = eob.claim_information\n @eob_index = index\n @services = eob.service_payment_eobs\n @is_claim_eob = (eob.category.upcase == \"CLAIM\")\n @count = count\n segments << transaction_set_line_number(index + 1)\n segments << transaction_statistics([eob])\n segments += generate_eobs\n end\n segments.flatten.compact\n end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def start_line_number=(_); end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def generate_eobs\n Output835.log.info \"\\n\\nPatient account number : #{@eob.patient_account_number}\"\n Output835.log.info \"This EOB has #{@services.length} service lines\"\n Output835.log.info \"This is a CLAIM LEVEL EOB\" if @is_claim_eob\n claim_segments = [claim_payment_loop, include_claim_dates]\n claim_segments << claim_supplemental_info unless @facility.details[:interest_in_service_line]\n claim_segments << claim_level_allowed_amount\n claim_segments << standard_industry_code_segments(@eob, @is_claim_eob) if @is_claim_eob\n claim_segments << service_payment_info_loop unless @is_claim_eob\n update_clp! claim_segments\n claim_segments.flatten.compact\n end",
"def segments\n if magic32?\n command(:LC_SEGMENT)\n else\n command(:LC_SEGMENT_64)\n end\n end",
"def details\n response = cs_get \"/segments/#{segment_id}.json\", {}\n Hashie::Mash.new(response)\n end",
"def visible_line_number\n @ev_height\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def second_line\n \"#{self.town} #{self.district} #{self.region}\".squeeze(\" \").strip\n end",
"def spren; end",
"def extract_end_page( existing_payload, solr_doc, fedora_doc )\n\n # for books and articles\n start_page = IngestHelpers.fedora_first_field_extract( fedora_doc, 'mods relatedItem part extent end' )\n return start_page if start_page.present?\n return nil\n end",
"def error_segment\n segment(100)\n end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def payment_advice_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FTX\\+AAG.*/)\n end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def end\n regions.max.end\n end",
"def starting_position; end",
"def pos_header\n @position - 2\n end",
"def exons\n unless defined?(@exons); parse_segmentpairs; end\n @exons\n end",
"def exons\n unless defined?(@exons); parse_segmentpairs; end\n @exons\n end",
"def segment_name()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.DetailedTimerComponentState_segment_name(@handle.ptr)\n result\n end",
"def stderrs; end",
"def s_optional\n SegmentReqs::Optional\n end",
"def parse_footnote_marker; end",
"def Segment\n A = 1\n B = 2\n C = 4\n D = 8\n E = 16\n F = 32\n G = 64\n H = 128\n J = 256\n K = 512\n M = 1024\n N = 2048\n P = 4096\n\n def initialize(segments=0)\n @segments = segments\n end\n\n def s(seg=Segment.new)\n Segment.new seg.segments + SOUTH\n end\nend",
"def coding_region_cdna_start\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.start_exon\n answer += self.translation.seq_start\n return answer\n else\n answer += exon.length\n end\n end\n \n end",
"def release_num\r\n segments = self.segments.dup\r\n segments.pop while segments.any? { |s| String === s }\r\n segments.pop if segments.size > 1\r\n\r\n segments[-1]\r\n end",
"def last_line_in_view()\n\t\tcoord_to_line_num(contents_y + height)\n\tend",
"def service_payment_info_loop\n segments = []\n @clp_05_amount = 0\n eob.service_payment_eobs.each_with_index do |service, index|\n service_klass = Output835.class_for(\"Service\", facility)\n Output835.log.info \"Applying class #{service_klass}\" if index == 0\n service_obj = service_klass.new(service, facility, payer, index, @element_seperator) if service\n service_segments = service_obj.generate\n segments += service_segments[0]\n @clp_05_amount += service_segments[1]\n end\n segments\n end",
"def pos_partial_page_header\n pos_fil_header + 4\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def print_segments\n\t\t# METHOD: after all of the HL7 content has been parsed, print the contents of each segment in a more easily readible format\n\t\t# output for 1 segment looks like:\n\t\t\t\t\t#~ :: Segment: PID\n\t\t\t\t\t#~ PID-0: Segment => PID\n\t\t\t\t\t#~ PID-1: Set ID - PID => 1\n\t\t\t\t\t#~ PID-2: Patient ID => \n\t\t\t\t\t#~ PID-3: Patient Identifier List => [[\"P00057804\", \"\", \"\", \"\", \"PN\"], [\"4009887514\", \"\", \"\", \"AUSHIC\", \"MC\"], [\"SMIAL001\", \"\", \"\", \"\", \"PI\"]]\n\t\t\t\t\t#~ PID-4: Alternate Patient ID - PID => \n\t\t\t\t\t#~ PID-5: Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\n\t\t\t\t\t#~ PID-6: Mother’s Maiden Name => \n\t\t\t\t\t#~ PID-7: Date/Time of Birth => 19770621\n\t\t\t\t\t#~ PID-8: Sex => M\n\t\t\t\t\t#~ PID-9: Patient Alias => \n\t\t\t\t\t#~ PID-10: Race => \n\t\t\t\t\t#~ PID-11: Patient Address => [\"818 Beach Road\", \"\", \"BEECHMERE\", \"\", \"4510\", \"AU\", \"H\"]\n\n\t\t\t# iterate over each segment\n\t\t\t@parsed_content.each do |segment|\n\t\t\t\tseg = segment[0]\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # eg => \"PID\"\n\t\t\t\t\n\t\t\t\t#get yaml file details\n\t\t\t\tyamlfile = \"hl7specification/#{seg}\"\t\t\t\t\t# for each segment, find the appropriate yaml file (ie one for each segment)\n\t\t\t\tspecs = YAML.load_file(yamlfile)\t\t\t\t\t\t\t# load the yaml file\n\t\t\t\t\n\t\t\t puts \":: #{specs[\"Header\"][\"name\"]} (#{seg})\"\t\t\t# print the text eg \":: Message Header Segment (MSH)\"\n\t\t\t \n\t\t\t # then iterate over each field in the particular segment\n\t\t\t\tsegment.each_with_index do |field, index|\t\t\t\t\t# then for each field...\n\t\t\t\t\tif index > 0 then\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only if the index is 1 or more (ie the first value is not useful here)\n\t\t\t\t\t\tfld = \"#{seg}-#{index}\"\t\t\t\t\t\t\t\t\t # get the field id => \"PID-5\"\n\t\t\t\t\t\tprint \" #{fld}: \"\t\t\t\t\t\t \t\t\t\t\t\t# on each line print the particular field being queried eg \"PID-5: \"\n\t\t\t\t\t\tfldname = specs[fld][\"name\"]\t\t\t\t\t\t\t\t\t# get the name of the field from the yaml file\n\t\t\t\t\t\tprint \"#{fldname} => \"\t\t\t\t\t\t\t\t\t\t\t\t# print the field name after the field eg \"PID-5: Patient Name\"\n\t\t\t\t\t\tif field.class == String then\t\t\t\t\t\t\t\t\t# if the field class is a string...\n\t\t\t\t\t\t\tputs field\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# then just print (ie add) the value of the string eg \"PID-7: Date/Time of Birth => 19770621\"\n\t\t\t\t\t\telsif field.class == Array then\t\t\t\t\t\t\t\t# otherwise if the field is an array, ie there is lower level structure...\n\t\t\t\t\t\t\tputs field.inspect\t\t\t\t\t\t\t\t\t\t\t\t\t# then print the structure eg \"PID-5 Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\"\n\t\t\t\t\t\tend # << end if field...\n\t\t\t\t\tend # << end if index > 0 \n\t\t\t\tend\t # << end segment.each_with_index\n\t\t\t\tputs\t\n\t\t end\t # << end @parsed_content.each\n\t \n\t end",
"def end_point; end",
"def claim_loop\n segments = []\n eobs.each_with_index do |eob, index|\n Output835.log.info \"\\n\\n Check number #{eob.check_information.check_number} undergoing processing\"\n Output835.log.info \"\\n\\n Check has #{eob.check_information.insurance_payment_eobs.length} eobs\"\n segments << transaction_set_line_number(index + 1)\n segments << transaction_statistics([eob])\n eob_klass = Output835.class_for(\"SingleStEob\", facility)\n eob_obj = eob_klass.new(eob, facility, payer, index, @element_seperator, @check_num,count(eob)) if eob\n Output835.log.info \"Applying class #{eob_klass}\" if index == 0\n segments += eob_obj.generate\n end\n segments = segments.flatten.compact\n segments unless segments.blank?\n end",
"def processa_eos\n puts(format(\"%<n>4i TRANSACOES\\tEOS\\t\\tINSERIDAS eos \", n: apigm.novax.empty? ? 0 : dml(eost_ins)))\n end",
"def end_pos=(_); end",
"def course_end\n self.course_start + self.course_length - 1\n end",
"def zero_end\n @chr_end\n end"
] | [
"0.5763126",
"0.57605004",
"0.5734976",
"0.55284214",
"0.55141",
"0.5512948",
"0.54200757",
"0.5416026",
"0.54143846",
"0.54108256",
"0.5408269",
"0.5403333",
"0.533853",
"0.5336713",
"0.5336219",
"0.53338856",
"0.5271985",
"0.52717525",
"0.52717525",
"0.5256794",
"0.525457",
"0.523121",
"0.5226574",
"0.5222632",
"0.5222632",
"0.52108264",
"0.52085865",
"0.52051955",
"0.5203114",
"0.51782185",
"0.5176856",
"0.51721686",
"0.5168644",
"0.5163148",
"0.5158978",
"0.5158904",
"0.51491475",
"0.51484364",
"0.5147935",
"0.51141304",
"0.510579",
"0.5096589",
"0.5092229",
"0.5091058",
"0.50787884",
"0.50753015",
"0.5074184",
"0.5074184",
"0.5073901",
"0.5065237",
"0.50513905",
"0.50455666",
"0.5045381",
"0.50427645",
"0.5027127",
"0.50165",
"0.5014271",
"0.5010225",
"0.49961615",
"0.49716747",
"0.4970726",
"0.49678725",
"0.49675",
"0.49658558",
"0.49597225",
"0.49495462",
"0.49464837",
"0.4943655",
"0.49386212",
"0.49374455",
"0.49250382",
"0.49233502",
"0.4920437",
"0.4918718",
"0.49066186",
"0.49066186",
"0.48955357",
"0.48945233",
"0.48940673",
"0.4886659",
"0.48813316",
"0.48784417",
"0.4869507",
"0.48634753",
"0.48611683",
"0.48576072",
"0.48443738",
"0.4842205",
"0.4842205",
"0.4842205",
"0.4842205",
"0.4842205",
"0.4842205",
"0.4842205",
"0.48406345",
"0.4839403",
"0.4834798",
"0.48314086",
"0.48288718",
"0.48282635",
"0.48264155"
] | 0.0 | -1 |
End of N3_PR Segment Details Start of TRN Segment Details | def originating_company_id_trace(*options)
'1000000009'
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def end_line\n attributes.fetch(:endLine)\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def start_line_number; end",
"def start_line_number; end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def ipn_end_point; end",
"def pos_rseg_header\n pos_page_body\n end",
"def segment_0; segments[0]; end",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def segments; end",
"def output_ris_end\n \"\"\n end",
"def end\n attributes.fetch(:end)\n end",
"def start_line_number=(_); end",
"def end_pos; end",
"def end_pos; end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def metadata_start\n 2\n end",
"def end(p0) end",
"def snr\n part.nr\n end",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def ipn_end_point=(_arg0); end",
"def end_line kind\n end",
"def span_start; end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def paragraph_end; end",
"def sub_sector; end",
"def sn\n end",
"def stop\n return self.seq_region_end\n end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def start_line\n attributes.fetch(:startLine)\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def find_page_end\n \n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def starting_position; end",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def end_section\n end",
"def spren; end",
"def parse_footnote_marker; end",
"def involved_segment\n case position % 3\n when 0 then :front\n when 1 then :middle\n when 2 then :back\n end\n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def rl_end_of_line(count, key)\r\n @rl_point = @rl_end\r\n 0\r\n end",
"def pan_last4\r\n params['panLast4']\r\n end",
"def prev_line; end",
"def second_line\n \"#{self.town} #{self.district} #{self.region}\".squeeze(\" \").strip\n end",
"def sub_hl\n end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def stations_in_between2 (end_lane)\n if ($end_connection>$end)\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end+1...$end_connection]\n else\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end_connection+1...$end].reverse\n end #end of the if\n end",
"def pos_header\n @position - 2\n end",
"def claim_loop\n segments = []\n @eobs.each_with_index do |eob, index|\n @check_grouper.last_eob = eob\n @check = eob.check_information\n @job = @check.job\n if @check.micr_line_information && @check.micr_line_information.payer && @facility.details[:micr_line_info]\n @payer = @check.micr_line_information.payer\n else\n @payer = @check.payer\n end\n @facility_output_config = @facility.output_config(@payer.payer_type)\n @reason_codes = nil #this variable is used in child class for configurable section\n @eob = eob\n @claim = eob.claim_information\n @eob_index = index\n @services = eob.service_payment_eobs\n @is_claim_eob = (eob.category.upcase == \"CLAIM\")\n @count = count\n segments << transaction_set_line_number(index + 1)\n segments << transaction_statistics([eob])\n segments += generate_eobs\n end\n segments.flatten.compact\n end",
"def part_of_head?\n return true if self.position == 0\n begin\n if self.nominal?\n # there are no non-nominal segments between given\n # segment and the beginning of its spelling\n gap = false\n self.spelling.segments.each do |prev_segment|\n break if prev_segment == self\n gap = true if !prev_segment.nominal?\n end\n !gap\n end\n rescue Exception => ex\n #puts ex\n false\n end\n end",
"def end_line(kind); end",
"def end_pos=(_); end",
"def last\n trailer_data\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def three_prime_utr_seq\n return self.seq[self.coding_region_cdna_end..-1]\n end",
"def begin_line kind\n end",
"def print_out_line\n\t\t\t#p ['id', id, 'ctd', ctd]\n\t\t\t#p rcp.results.zip(rcp.results.map{|r| send(r)})\n\t\t\tname = @run_name\n\t\t\tname += \" (res: #@restart_id)\" if @restart_id\n\t\t\tname += \" real_id: #@real_id\" if @real_id\n\t\t\tbeginning = sprintf(\"%2d:%d %-60s %1s:%2.1f(%s)\", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s)\n\t\t\tif @status == :Incomplete and @completed_timesteps\n\t\t\t\tbeginning += sprintf(\" %d steps \", @completed_timesteps)\n\t\t\telsif @percent_complete\n \t\t\t\tbeginning+=sprintf(\" %3s%1s \", percent_complete, \"%\")\n\t\t\tend\n\t\t\tif ctd\n\t\t\t\t#beginning += sprintf(\"Q:%f, Pfusion:%f MW, Ti0:%f keV, Te0:%f keV, n0:%f x10^20\", fusionQ, pfus, ti0, te0, ne0)\n\t\t\tend\n\t\t\tbeginning += \" ---#{@comment}\" if @comment\n\t\t\tbeginning\n\t\tend",
"def segment\n return asset_on_segment_history.segment rescue nil\n end",
"def zero_end\n @chr_end\n end",
"def end_point; end",
"def payment_advice_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FTX\\+AAG.*/)\n end",
"def stderrs; end",
"def tr8n_component\n nil\n end",
"def last_line_only(range); end",
"def last_line_only(range); end",
"def last_line_only(range); end",
"def ld_hl_sp_plusr8\n end",
"def missing_end trace\n found :missing_end\n @lexer.undo trace\n end",
"def segments=(_arg0); end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def is_endstr?(); @type == GRT_ENDSTR; end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def start_num\n return @start_num\n end",
"def line_number\n number[6..-1]\n end",
"def start\n @parts.first.start\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def line_index()\n end",
"def input_end_offset\n _response_word.fetch(\"endingPos\", nil)\n end",
"def extract_end_page( existing_payload, solr_doc, fedora_doc )\n\n # for books and articles\n start_page = IngestHelpers.fedora_first_field_extract( fedora_doc, 'mods relatedItem part extent end' )\n return start_page if start_page.present?\n return nil\n end",
"def tagline; end",
"def line_num; end"
] | [
"0.57012254",
"0.5604279",
"0.54739887",
"0.54739887",
"0.5416935",
"0.5403451",
"0.5363855",
"0.5349448",
"0.5348509",
"0.5344419",
"0.53170377",
"0.5304454",
"0.5300531",
"0.5280336",
"0.5280336",
"0.5271441",
"0.5263564",
"0.52548236",
"0.5251289",
"0.52215534",
"0.51985013",
"0.5193878",
"0.5189289",
"0.5179404",
"0.5171048",
"0.5153796",
"0.51421595",
"0.51362973",
"0.51312083",
"0.51220775",
"0.51196367",
"0.51148957",
"0.5114412",
"0.5109244",
"0.5108966",
"0.50855345",
"0.5075043",
"0.50747305",
"0.5073169",
"0.50634444",
"0.5059676",
"0.5058309",
"0.50569934",
"0.5055168",
"0.50366676",
"0.50282526",
"0.5015305",
"0.50152415",
"0.50022197",
"0.49987465",
"0.49985677",
"0.4993235",
"0.49912816",
"0.49579194",
"0.49273264",
"0.4921115",
"0.49101534",
"0.4908572",
"0.4908572",
"0.4908572",
"0.4908572",
"0.4908572",
"0.4908572",
"0.4908572",
"0.4899405",
"0.48993063",
"0.48885062",
"0.48881084",
"0.48861223",
"0.48771495",
"0.48761344",
"0.4867127",
"0.48621157",
"0.48621157",
"0.4860878",
"0.4851982",
"0.4851184",
"0.48371485",
"0.4834551",
"0.48316687",
"0.48309678",
"0.48300594",
"0.48202065",
"0.48151135",
"0.48151135",
"0.48151135",
"0.48124123",
"0.48123845",
"0.48032105",
"0.47977984",
"0.47971618",
"0.47938743",
"0.4785598",
"0.47685212",
"0.47683054",
"0.4765107",
"0.47593015",
"0.4758659",
"0.47552097",
"0.47545964",
"0.47501442"
] | 0.0 | -1 |
End of TRN Segment Details Start of BPR Segment Details | def payment_format(*options)
@is_ach_payment ? 'CCP' : ''
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def end_line\n attributes.fetch(:endLine)\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def end\n attributes.fetch(:end)\n end",
"def stop\n return self.seq_region_end\n end",
"def end_pos; end",
"def end_pos; end",
"def find_page_end\n \n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def segments; end",
"def start_line_number; end",
"def start_line_number; end",
"def span_start; end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def output_ris_end\n \"\"\n end",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def pos_rseg_header\n pos_page_body\n end",
"def end_point; end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def end_line kind\n end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def bac_top_line\n bac = budget_at_completion\n bac_top_line = [[start_date, bac],[end_date_for_top_line, bac]] \n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def ipn_end_point; end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def paragraph_end; end",
"def segment_0; segments[0]; end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def end(p0) end",
"def end_section\n end",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def end_tag\n \"#TESTLAB-END-#{self.bridge.to_s.upcase}\"\n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def start_line_number=(_); end",
"def stations_in_between2 (end_lane)\n if ($end_connection>$end)\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end+1...$end_connection]\n else\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end_connection+1...$end].reverse\n end #end of the if\n end",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def start_line\n attributes.fetch(:startLine)\n end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def starting_position; end",
"def sub_sector; end",
"def rl_end_of_line(count, key)\r\n @rl_point = @rl_end\r\n 0\r\n end",
"def end_line(kind); end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def end\n j_instance.getEnd\n end",
"def exclude_end?() end",
"def end_pos=(_); end",
"def end; self.begin + self.size - 1; end",
"def ends\n form_print(ends_at)\n end",
"def ends\n form_print(ends_at)\n end",
"def ipn_end_point=(_arg0); end",
"def is_endstr?(); @type == GRT_ENDSTR; end",
"def claim_loop\n segments = []\n @eobs.each_with_index do |eob, index|\n @check_grouper.last_eob = eob\n @check = eob.check_information\n @job = @check.job\n if @check.micr_line_information && @check.micr_line_information.payer && @facility.details[:micr_line_info]\n @payer = @check.micr_line_information.payer\n else\n @payer = @check.payer\n end\n @facility_output_config = @facility.output_config(@payer.payer_type)\n @reason_codes = nil #this variable is used in child class for configurable section\n @eob = eob\n @claim = eob.claim_information\n @eob_index = index\n @services = eob.service_payment_eobs\n @is_claim_eob = (eob.category.upcase == \"CLAIM\")\n @count = count\n segments << transaction_set_line_number(index + 1)\n segments << transaction_statistics([eob])\n segments += generate_eobs\n end\n segments.flatten.compact\n end",
"def end\n @history.objects.find { |o| o.name == \"end\" }.val\n end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def end\n @ranges.size == 1 ? @ranges[0].end : nil\n end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def zero_end\n @chr_end\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def end\n regions.max.end\n end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def end_at\n @attributes[:end_at]\n end",
"def end_at\n @attributes[:end_at]\n end",
"def tag_end(*args)\n #puts \"#{@@idpath} #{args[0]}\"\n @@endCount +=1\n @@startCount=0\n @@idpath.pop\n @@curentid = @@path.pop\n end",
"def prev_line; end",
"def extract_end_page( existing_payload, solr_doc, fedora_doc )\n\n # for books and articles\n start_page = IngestHelpers.fedora_first_field_extract( fedora_doc, 'mods relatedItem part extent end' )\n return start_page if start_page.present?\n return nil\n end",
"def sub_hl\n end",
"def sn\n end",
"def payment_advice_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FTX\\+AAG.*/)\n end",
"def end\n empty? ? 0 : last.end\n end",
"def end_point; get(end_param) end",
"def stopped_at; end",
"def stopped_at; end",
"def stopped_at; end",
"def part_of_head?\n return true if self.position == 0\n begin\n if self.nominal?\n # there are no non-nominal segments between given\n # segment and the beginning of its spelling\n gap = false\n self.spelling.segments.each do |prev_segment|\n break if prev_segment == self\n gap = true if !prev_segment.nominal?\n end\n !gap\n end\n rescue Exception => ex\n #puts ex\n false\n end\n end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def last_line_only(range); end",
"def last_line_only(range); end",
"def last_line_only(range); end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def metadata_start\n 2\n end",
"def end_character\n attributes.fetch(:endCharacter)\n end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def start\n return self.seq_region_start\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def input_end_offset\n _response_word.fetch(\"endingPos\", nil)\n end"
] | [
"0.61290276",
"0.58103645",
"0.58057463",
"0.5569222",
"0.55574316",
"0.55574316",
"0.55082184",
"0.54987603",
"0.54856926",
"0.5480722",
"0.5480722",
"0.5470453",
"0.5470246",
"0.54409635",
"0.5424857",
"0.5401535",
"0.5392369",
"0.53892976",
"0.5373312",
"0.5363859",
"0.53621256",
"0.5360007",
"0.5340503",
"0.53153",
"0.53153",
"0.53153",
"0.53153",
"0.5311346",
"0.53033215",
"0.53033215",
"0.5297093",
"0.5292126",
"0.5275684",
"0.5260178",
"0.5254876",
"0.52509856",
"0.5228344",
"0.52265745",
"0.5217843",
"0.51960766",
"0.5193009",
"0.5183222",
"0.5175178",
"0.5158511",
"0.51553285",
"0.51509047",
"0.5150023",
"0.514943",
"0.51473904",
"0.51363856",
"0.51302564",
"0.5125622",
"0.5113156",
"0.5113156",
"0.5112307",
"0.5109002",
"0.5103908",
"0.5099691",
"0.5084746",
"0.508387",
"0.50838596",
"0.50838596",
"0.50838596",
"0.50838596",
"0.50838596",
"0.50838596",
"0.50838596",
"0.5080558",
"0.50791126",
"0.5050573",
"0.5048637",
"0.5044869",
"0.5042054",
"0.5042054",
"0.50412226",
"0.5033318",
"0.5033282",
"0.50287884",
"0.5020473",
"0.5017203",
"0.501531",
"0.50134903",
"0.5011895",
"0.5011895",
"0.5011895",
"0.5008614",
"0.50062317",
"0.5005566",
"0.5005566",
"0.5005566",
"0.49937582",
"0.4982616",
"0.49614105",
"0.49573728",
"0.4952492",
"0.4951285",
"0.49467286",
"0.49467286",
"0.49467286",
"0.49467286",
"0.49418026"
] | 0.0 | -1 |
End of BPR Segment Details Start of REF_F8 Segment Details | def original_reference_number(*options)
get_eob_image.try(:original_file_name)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def b_offset\n @last_4[1].to_i\n end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def frtrefheader(io)\n result = frtheader(io.read(4)) # rt, grbitFrt\n result[:ref8] = ref8u(io.read(8)) # ref8 (8 bytes): A Ref8U that references the range of cells associated with the containing record.\n result\n end",
"def end_line\n attributes.fetch(:endLine)\n end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def sub_hl\n end",
"def ld_hl_sp_plusr8\n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def page_fsp_hdr\n 0\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def get_end(fbe_begin)\n @_buffer.unshift(fbe_begin)\n end",
"def parse_footnote_marker; end",
"def next_fii_bf_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FII\\+BF.*/)\n end",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def output_ris_end\n \"\"\n end",
"def zero_end\n @chr_end\n end",
"def end\n attributes.fetch(:end)\n end",
"def read_end_stops()\n start_command('F81', false, @status_debug_msg)\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def create_end(fbe_begin)\n fbe_end = buffer.size\n fbe_full_size = fbe_end - fbe_begin\n write_uint32(@_model.fbe_offset - 4, fbe_full_size)\n fbe_full_size\n end",
"def full_ref # rubocop:disable Metrics/AbcSize\n @full_ref ||= begin\n ref = \"#{refparts[:perfix]}#{refparts[:series]} #{refparts[:code]}\"\n ref += \"pt#{refparts[:prt]}\" if refparts[:prt] # long_to_short(refparts, \"prt\").to_s\n ref += \"ver#{refparts[:ver]}\" if refparts[:ver] # long_to_short(refparts, \"vol\").to_s\n ref += \"v#{refparts[:vol]}\" if refparts[:vol]\n ref\n end\n end",
"def parse_footnote_definition; end",
"def find_page_end\n \n end",
"def pos_fsp_header\n pos_fil_header + size_fil_header\n end",
"def end(p0) end",
"def boundary \n \"An43094fu\"\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def last_defined_frame\n last_f = @segments[-1].start_frame\n return 100 if last_f == POS_INF\n return last_f\n end",
"def pos_fil_header\n 0\n end",
"def segment_0; segments[0]; end",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def size_fseg_header\n 2 * Innodb::FsegEntry::SIZE\n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def end_line kind\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def parse_eob_marker; end",
"def end_pos; end",
"def end_pos; end",
"def inspect_details\n\t\t\treturn %Q{FIN:%d RSV1:%d RSV2:%d RSV3:%d OPCODE:%s (0x%x) -- %0.2fK body} % [\n\t\t\t\tself.fin? ? 1 : 0,\n\t\t\t\tself.rsv1? ? 1 : 0,\n\t\t\t\tself.rsv2? ? 1 : 0,\n\t\t\t\tself.rsv3? ? 1 : 0,\n\t\t\t\tself.opcode,\n\t\t\t\tself.numeric_opcode,\n\t\t\t\t(self.payload.size / 1024.0),\n\t\t\t]\n\t\tend",
"def sub_sector; end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def incomplete\r\n\r\n end",
"def pos_rseg_header\n pos_page_body\n end",
"def last\n trailer_data\n end",
"def frame_description(b); end",
"def end_line(kind); end",
"def add_sp_r8\n end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def fseg_header\n @fseg_header ||= cursor(pos_fseg_header).name(\"fseg\") do |c|\n FsegHeader.new(\n leaf: c.name(\"fseg[leaf]\") { Innodb::FsegEntry.get_inode(@space, c) },\n internal: c.name(\"fseg[internal]\") { Innodb::FsegEntry.get_inode(@space, c) }\n )\n end\n end",
"def pos_partial_page_header\n pos_fil_header + 4\n end",
"def get_begin\n if (@_buffer.offset + fbe_offset + fbe_size) > @_buffer.size\n return 0\n end\n\n fbe_struct_offset = read_uint32(fbe_offset)\n if (fbe_struct_offset == 0) || ((@_buffer.offset + fbe_struct_offset + 4 + 4) > @_buffer.size)\n return 0\n end\n\n fbe_struct_size = read_uint32(fbe_struct_offset)\n if fbe_struct_size < (4 + 4)\n return 0\n end\n\n @_buffer.shift(fbe_struct_offset)\n fbe_struct_offset\n end",
"def get_begin\n if (@_buffer.offset + fbe_offset + fbe_size) > @_buffer.size\n return 0\n end\n\n fbe_struct_offset = read_uint32(fbe_offset)\n if (fbe_struct_offset == 0) || ((@_buffer.offset + fbe_struct_offset + 4 + 4) > @_buffer.size)\n return 0\n end\n\n fbe_struct_size = read_uint32(fbe_struct_offset)\n if fbe_struct_size < (4 + 4)\n return 0\n end\n\n @_buffer.shift(fbe_struct_offset)\n fbe_struct_offset\n end",
"def get_begin\n if (@_buffer.offset + fbe_offset + fbe_size) > @_buffer.size\n return 0\n end\n\n fbe_struct_offset = read_uint32(fbe_offset)\n if (fbe_struct_offset == 0) || ((@_buffer.offset + fbe_struct_offset + 4 + 4) > @_buffer.size)\n return 0\n end\n\n fbe_struct_size = read_uint32(fbe_struct_offset)\n if fbe_struct_size < (4 + 4)\n return 0\n end\n\n @_buffer.shift(fbe_struct_offset)\n fbe_struct_offset\n end",
"def get_begin\n if (@_buffer.offset + fbe_offset + fbe_size) > @_buffer.size\n return 0\n end\n\n fbe_struct_offset = read_uint32(fbe_offset)\n if (fbe_struct_offset == 0) || ((@_buffer.offset + fbe_struct_offset + 4 + 4) > @_buffer.size)\n return 0\n end\n\n fbe_struct_size = read_uint32(fbe_struct_offset)\n if fbe_struct_size < (4 + 4)\n return 0\n end\n\n @_buffer.shift(fbe_struct_offset)\n fbe_struct_offset\n end",
"def stop\n return self.seq_region_end\n end",
"def bgnstr_record() @records.get(GRT_BGNSTR); end",
"def cp_hl\n end",
"def fil_trailer\n @fil_trailer ||= cursor(pos_fil_trailer).name(\"fil_trailer\") do |c|\n {\n :checksum => c.name(\"checksum\") { c.get_uint32 },\n :lsn_low32 => c.name(\"lsn_low32\") { c.get_uint32 },\n }\n end\n end",
"def end_section\n end",
"def fsp_header\n @fsp_header ||= cursor(pos_fsp_header).name(\"fsp\") do |c|\n {\n :space_id => c.name(\"space_id\") { c.get_uint32 },\n :unused => c.name(\"unused\") { c.get_uint32 },\n :size => c.name(\"size\") { c.get_uint32 },\n :free_limit => c.name(\"free_limit\") { c.get_uint32 },\n :flags => c.name(\"flags\") { \n self.class.decode_flags(c.get_uint32)\n },\n :frag_n_used => c.name(\"frag_n_used\") { c.get_uint32 },\n :free => c.name(\"list[free]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :free_frag => c.name(\"list[free_frag]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :full_frag => c.name(\"list[full_frag]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :first_unused_seg => c.name(\"first_unused_seg\") { c.get_uint64 },\n :full_inodes => c.name(\"list[full_inodes]\") {\n Innodb::List::Inode.new(@space, Innodb::List.get_base_node(c))\n },\n :free_inodes => c.name(\"list[free_inodes]\") {\n Innodb::List::Inode.new(@space, Innodb::List.get_base_node(c))\n },\n }\n end\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def end_field; end",
"def end_point; end",
"def offset\n fil_header[:offset]\n end",
"def cp_d8\n end",
"def extract_end_page( existing_payload, solr_doc, fedora_doc )\n\n # for books and articles\n start_page = IngestHelpers.fedora_first_field_extract( fedora_doc, 'mods relatedItem part extent end' )\n return start_page if start_page.present?\n return nil\n end",
"def is_endstr?(); @type == GRT_ENDSTR; end",
"def pos_fil_trailer\n size - size_fil_trailer\n end",
"def prev_line; end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def last_block\n\t\t(self.track_count*8)-1\n\tend",
"def bic_with_branch_code\n if self.bic.length == 8\n return \"#{self.bic}XXX\"\n else\n return self.bic\n end\n end",
"def bf\n field_fetch('BF')\n end",
"def bgnstr() @records.get_data(GRT_BGNSTR); end",
"def read_details\r\n read_detail while peek == 'LX'\r\n self\r\n end",
"def last_sig; end",
"def Com6 # Recuperation Info\n \n end",
"def end_of_section\n\t$report_file.puts(\"-------END OF SECTION-------\")\n\t$report_file.puts\nend",
"def end_tag\n \"#TESTLAB-END-#{self.bridge.to_s.upcase}\"\n end",
"def read_end_stops()\n @ramps_arduino.execute_command('F81', false, @status_debug_msg)\n end",
"def metadata_start\n 2\n end",
"def start_line_number; end",
"def start_line_number; end",
"def tag_end(*args)\n #puts \"#{@@idpath} #{args[0]}\"\n @@endCount +=1\n @@startCount=0\n @@idpath.pop\n @@curentid = @@path.pop\n end",
"def form_end\n ''\n end",
"def local_header_offset; end",
"def strand; @data[8]; end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end"
] | [
"0.5860463",
"0.575525",
"0.5515742",
"0.55073863",
"0.5480592",
"0.5452496",
"0.5342762",
"0.53357714",
"0.5328968",
"0.5327417",
"0.52869266",
"0.52869266",
"0.52869266",
"0.52869266",
"0.5276431",
"0.5270954",
"0.52605766",
"0.52424914",
"0.523367",
"0.52047014",
"0.519582",
"0.51872814",
"0.51872814",
"0.51872814",
"0.51872814",
"0.51588285",
"0.5155766",
"0.5150274",
"0.5144718",
"0.5142823",
"0.5137156",
"0.5130256",
"0.5129609",
"0.5113987",
"0.50825876",
"0.50707346",
"0.50615865",
"0.5059626",
"0.5050874",
"0.50448656",
"0.50408196",
"0.5036286",
"0.5025685",
"0.5025685",
"0.5025362",
"0.5004375",
"0.50042725",
"0.49743998",
"0.49724463",
"0.49562347",
"0.49499276",
"0.4949208",
"0.4948064",
"0.49435025",
"0.49435025",
"0.49435025",
"0.49435025",
"0.49435025",
"0.49435025",
"0.49435025",
"0.4939989",
"0.49201235",
"0.49200395",
"0.49200395",
"0.49200395",
"0.49200395",
"0.49192375",
"0.49077457",
"0.4904826",
"0.49013177",
"0.49013153",
"0.48974162",
"0.4895397",
"0.4893478",
"0.4891828",
"0.48777923",
"0.48681146",
"0.4866079",
"0.48648828",
"0.48634264",
"0.4859424",
"0.48473796",
"0.4847351",
"0.4844638",
"0.48368803",
"0.48334843",
"0.48282617",
"0.48153853",
"0.48135066",
"0.4807419",
"0.47979587",
"0.47905046",
"0.47821078",
"0.47750127",
"0.47625563",
"0.47625563",
"0.47557667",
"0.47550225",
"0.475261",
"0.47491798",
"0.47490677"
] | 0.0 | -1 |
End of REF_F8 Segment Details Start of CLP Segment Details | def claim_payment_amount(*options)
@eob.amount('total_amount_paid_for_claim')
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ld_hl_sp_plusr8\n end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def cp_hl\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def sub_hl\n end",
"def seek_section_head\n @psd_file.seek(@parser.color_mode_data.termination_pos, IO::SEEK_SET)\n end",
"def page_fsp_hdr\n 0\n end",
"def segment_0; segments[0]; end",
"def end(p0) end",
"def pos_fsp_header\n pos_fil_header + size_fil_header\n end",
"def sub_sector; end",
"def end_line\n attributes.fetch(:endLine)\n end",
"def parse_footnote_marker; end",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def pos_fil_header\n 0\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def full_ref # rubocop:disable Metrics/AbcSize\n @full_ref ||= begin\n ref = \"#{refparts[:perfix]}#{refparts[:series]} #{refparts[:code]}\"\n ref += \"pt#{refparts[:prt]}\" if refparts[:prt] # long_to_short(refparts, \"prt\").to_s\n ref += \"ver#{refparts[:ver]}\" if refparts[:ver] # long_to_short(refparts, \"vol\").to_s\n ref += \"v#{refparts[:vol]}\" if refparts[:vol]\n ref\n end\n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def start_line_number; end",
"def start_line_number; end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def pos_partial_page_header\n pos_fil_header + 4\n end",
"def read_end_stops()\n start_command('F81', false, @status_debug_msg)\n end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def end_section\n end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def zero_end\n @chr_end\n end",
"def end_line kind\n end",
"def segments; end",
"def fseg_header\n @fseg_header ||= cursor(pos_fseg_header).name(\"fseg\") do |c|\n FsegHeader.new(\n leaf: c.name(\"fseg[leaf]\") { Innodb::FsegEntry.get_inode(@space, c) },\n internal: c.name(\"fseg[internal]\") { Innodb::FsegEntry.get_inode(@space, c) }\n )\n end\n end",
"def cp_d8\n end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def read_details\r\n read_detail while peek == 'LX'\r\n self\r\n end",
"def end_pos; end",
"def end_pos; end",
"def add_sp_r8\n end",
"def local_header_offset; end",
"def pos_rseg_header\n pos_page_body\n end",
"def parse_footnote_definition; end",
"def last_defined_frame\n last_f = @segments[-1].start_frame\n return 100 if last_f == POS_INF\n return last_f\n end",
"def end_line(kind); end",
"def prev_line; end",
"def frtrefheader(io)\n result = frtheader(io.read(4)) # rt, grbitFrt\n result[:ref8] = ref8u(io.read(8)) # ref8 (8 bytes): A Ref8U that references the range of cells associated with the containing record.\n result\n end",
"def end_of_section\n\t$report_file.puts(\"-------END OF SECTION-------\")\n\t$report_file.puts\nend",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def add_hl_sp\n end",
"def inspect_details\n\t\t\treturn %Q{FIN:%d RSV1:%d RSV2:%d RSV3:%d OPCODE:%s (0x%x) -- %0.2fK body} % [\n\t\t\t\tself.fin? ? 1 : 0,\n\t\t\t\tself.rsv1? ? 1 : 0,\n\t\t\t\tself.rsv2? ? 1 : 0,\n\t\t\t\tself.rsv3? ? 1 : 0,\n\t\t\t\tself.opcode,\n\t\t\t\tself.numeric_opcode,\n\t\t\t\t(self.payload.size / 1024.0),\n\t\t\t]\n\t\tend",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def fsp_header\n @fsp_header ||= cursor(pos_fsp_header).name(\"fsp\") do |c|\n {\n :space_id => c.name(\"space_id\") { c.get_uint32 },\n :unused => c.name(\"unused\") { c.get_uint32 },\n :size => c.name(\"size\") { c.get_uint32 },\n :free_limit => c.name(\"free_limit\") { c.get_uint32 },\n :flags => c.name(\"flags\") { \n self.class.decode_flags(c.get_uint32)\n },\n :frag_n_used => c.name(\"frag_n_used\") { c.get_uint32 },\n :free => c.name(\"list[free]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :free_frag => c.name(\"list[free_frag]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :full_frag => c.name(\"list[full_frag]\") {\n Innodb::List::Xdes.new(@space, Innodb::List.get_base_node(c))\n },\n :first_unused_seg => c.name(\"first_unused_seg\") { c.get_uint64 },\n :full_inodes => c.name(\"list[full_inodes]\") {\n Innodb::List::Inode.new(@space, Innodb::List.get_base_node(c))\n },\n :free_inodes => c.name(\"list[free_inodes]\") {\n Innodb::List::Inode.new(@space, Innodb::List.get_base_node(c))\n },\n }\n end\n end",
"def begin_line(kind); end",
"def begin_line(kind); end",
"def metadata_start\n 2\n end",
"def ipn_end_point=(_arg0); end",
"def size_fseg_header\n 2 * Innodb::FsegEntry::SIZE\n end",
"def begin_line kind\n end",
"def output_ris_end\n \"\"\n end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def start_line_number=(_); end",
"def offset\n fil_header[:offset]\n end",
"def show_program_h_offset\n\t\t\tputs \" Start of program headers: #{@elf_program_h_offset.to_h} (bytes into file)\"\n\t\tend",
"def lookahead_section_header(start_ofs, end_ofs)\n section_ofs = lookahead_u4(start_ofs, end_ofs)\n section_size = lookahead_u4(start_ofs+4, end_ofs)\n if section_ofs == start_ofs and section_size >= 8 and start_ofs + section_size <= end_ofs\n section_size\n else\n nil\n end\n rescue LookaheadFailure\n nil\n end",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def tell; @next_block_pos end",
"def end_point; end",
"def paragraph_end; end",
"def pos_fil_trailer\n size - size_fil_trailer\n end",
"def record_header_redundant_field_end_offsets(header, cursor)\n header.n_fields.times.map do |n|\n cursor.name(\"field_end_offset[#{n}]\") { cursor.read_uint_by_size(header.offset_size) }\n end\n end",
"def current_offset; end",
"def segments\n if magic32?\n command(:LC_SEGMENT)\n else\n command(:LC_SEGMENT_64)\n end\n end",
"def fil_trailer\n @fil_trailer ||= cursor(pos_fil_trailer).name(\"fil_trailer\") do |c|\n {\n :checksum => c.name(\"checksum\") { c.get_uint32 },\n :lsn_low32 => c.name(\"lsn_low32\") { c.get_uint32 },\n }\n end\n end",
"def context_type_event_detailed_800_header\n ContextTypeDef.new(\n :event_detailed_800_header,\n [\n /\\s*(POS)\\s+(COGNOME E NOME)\\s+(NAZ)\\s+(\\d{2,4} m\\s+){8}\\s*(ARRIVO)/i,\n /\\s*SOCIETA'\\s+(ANNO)/i,\n /\\s*(800) *(m|metri)? *(stile|SL|ST)/i\n ]\n )\n end",
"def ipn_end_point; end",
"def pos_header\n @position - 2\n end",
"def stop\n return self.seq_region_end\n end",
"def next_fii_bf_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FII\\+BF.*/)\n end",
"def incomplete\r\n\r\n end",
"def inspect\n \"%-40s @ 0x%08x [%d kB] => %s\" % [self.path.yellow, off_len_low, data_len_low>>10, main_type]\n end",
"def inspect\n \"%-40s @ 0x%08x [%d kB] => %s\" % [self.path.yellow, off_len_low, data_len_low>>10, main_type]\n end",
"def last\n trailer_data\n end",
"def boundary \n \"An43094fu\"\n end",
"def inc_hl\n end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def starting_position; end",
"def rl_end_of_line(count, key)\r\n @rl_point = @rl_end\r\n 0\r\n end",
"def Com6 # Recuperation Info\n \n end",
"def end\n attributes.fetch(:end)\n end",
"def first_offset; end",
"def first_offset; end",
"def section; end",
"def cursor_end\n blen = @buffer.rstrip.length\n if blen < @width\n set_form_col blen\n else\n # there is a problem here FIXME. \n @pcol = blen-@width\n #set_form_col @width-1\n set_form_col blen\n end\n @curpos = blen # this is position in array where editing or motion is to happen regardless of what you see\n # regardless of pcol (panning)\n # $log.debug \" crusor END cp:#{@curpos} pcol:#{@pcol} b.l:#{@buffer.length} d_l:#{@width} fc:#{@form.col}\"\n #set_form_col @buffer.length\n end"
] | [
"0.5818006",
"0.5657622",
"0.5643015",
"0.54958326",
"0.54188406",
"0.54075307",
"0.537706",
"0.53250694",
"0.53011256",
"0.52572614",
"0.5251237",
"0.5222466",
"0.5220437",
"0.52007854",
"0.51944447",
"0.51516056",
"0.5142462",
"0.51390076",
"0.5134795",
"0.5127802",
"0.51111823",
"0.51111823",
"0.5101404",
"0.5088267",
"0.5078492",
"0.50781345",
"0.50775284",
"0.5066276",
"0.50611836",
"0.50557065",
"0.5055569",
"0.5032392",
"0.5030214",
"0.5026826",
"0.5026743",
"0.5026365",
"0.50185955",
"0.50169164",
"0.50169164",
"0.5012432",
"0.5011293",
"0.5005303",
"0.49959028",
"0.49890023",
"0.4981422",
"0.49788037",
"0.4970538",
"0.4959692",
"0.49587587",
"0.4951267",
"0.49336857",
"0.4929751",
"0.492863",
"0.49207628",
"0.48991105",
"0.48991105",
"0.48975396",
"0.4892043",
"0.4887886",
"0.48873273",
"0.48856732",
"0.48774457",
"0.48774457",
"0.48774457",
"0.48774457",
"0.48774457",
"0.48774457",
"0.48774457",
"0.4877024",
"0.48657358",
"0.4864026",
"0.48594525",
"0.48539475",
"0.48454377",
"0.4834778",
"0.4831711",
"0.48284158",
"0.48140365",
"0.48113534",
"0.48061666",
"0.47945887",
"0.47933197",
"0.4791909",
"0.4785566",
"0.47821894",
"0.4782157",
"0.4772407",
"0.47675925",
"0.47675925",
"0.47656465",
"0.47649407",
"0.47594258",
"0.47573388",
"0.47572106",
"0.47555256",
"0.47468475",
"0.47431228",
"0.47405103",
"0.47405103",
"0.4735443",
"0.47330365"
] | 0.0 | -1 |
End of CLP Segment Details Start of NM1_82 Segment Details | def rendering_provider_name_suffix(*options)
blank_segment
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def end_line\n attributes.fetch(:endLine)\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def segments; end",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def segment_0; segments[0]; end",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def sub_sector; end",
"def end_pos; end",
"def end_pos; end",
"def end(p0) end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def ipn_end_point; end",
"def segments\n if magic32?\n command(:LC_SEGMENT)\n else\n command(:LC_SEGMENT_64)\n end\n end",
"def start_line_number; end",
"def start_line_number; end",
"def end_footnote\n @mode = @mode.split('|').last\n end",
"def zero_end\n @chr_end\n end",
"def coding_region_cdna_start\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.start_exon\n answer += self.translation.seq_start\n return answer\n else\n answer += exon.length\n end\n end\n \n end",
"def output_ris_end\n \"\"\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def ipn_end_point=(_arg0); end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def sub_hl\n end",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def print_mc\n if @minimal_segment_count == segment_count+1\n print \"0\\n\";\n else\n print \"#{@minimal_segment_count}\\n\"\n for i in 0..segment_count-1\n if is_in_mc?(i)\n get_segment(i).to_s2\n end\n end\n end\n end",
"def parse_footnote_marker; end",
"def stations_in_between2 (end_lane)\n if ($end_connection>$end)\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end+1...$end_connection]\n else\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end_connection+1...$end].reverse\n end #end of the if\n end",
"def end\n attributes.fetch(:end)\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def cp_hl\n end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def segment_name()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.DetailedTimerComponentState_segment_name(@handle.ptr)\n result\n end",
"def segments=(_arg0); end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def print_known_cidr_blks_asce\n\t\tputs \"\\nIndex of known CIDR Net blocks in Ascending Order:\"\n\t\tputs @known_cidr_blks_asce_index\n\t\tputs \"End of the Index\"\n\tend",
"def end_line kind\n end",
"def stop\n return self.seq_region_end\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def end_section\n end",
"def read_end_stops()\n start_command('F81', false, @status_debug_msg)\n end",
"def start_line_number=(_); end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def paragraph_end; end",
"def ld_hl_sp_plusr8\n end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def details\n response = cs_get \"/segments/#{segment_id}.json\", {}\n Hashie::Mash.new(response)\n end",
"def course_end\n self.course_start + self.course_length - 1\n end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def metadata_start\n 2\n end",
"def end_line(kind); end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def input_end_offset\n _response_word.fetch(\"endingPos\", nil)\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def pos_rseg_header\n pos_page_body\n end",
"def print_segments\n\t\t# METHOD: after all of the HL7 content has been parsed, print the contents of each segment in a more easily readible format\n\t\t# output for 1 segment looks like:\n\t\t\t\t\t#~ :: Segment: PID\n\t\t\t\t\t#~ PID-0: Segment => PID\n\t\t\t\t\t#~ PID-1: Set ID - PID => 1\n\t\t\t\t\t#~ PID-2: Patient ID => \n\t\t\t\t\t#~ PID-3: Patient Identifier List => [[\"P00057804\", \"\", \"\", \"\", \"PN\"], [\"4009887514\", \"\", \"\", \"AUSHIC\", \"MC\"], [\"SMIAL001\", \"\", \"\", \"\", \"PI\"]]\n\t\t\t\t\t#~ PID-4: Alternate Patient ID - PID => \n\t\t\t\t\t#~ PID-5: Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\n\t\t\t\t\t#~ PID-6: Mother’s Maiden Name => \n\t\t\t\t\t#~ PID-7: Date/Time of Birth => 19770621\n\t\t\t\t\t#~ PID-8: Sex => M\n\t\t\t\t\t#~ PID-9: Patient Alias => \n\t\t\t\t\t#~ PID-10: Race => \n\t\t\t\t\t#~ PID-11: Patient Address => [\"818 Beach Road\", \"\", \"BEECHMERE\", \"\", \"4510\", \"AU\", \"H\"]\n\n\t\t\t# iterate over each segment\n\t\t\t@parsed_content.each do |segment|\n\t\t\t\tseg = segment[0]\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # eg => \"PID\"\n\t\t\t\t\n\t\t\t\t#get yaml file details\n\t\t\t\tyamlfile = \"hl7specification/#{seg}\"\t\t\t\t\t# for each segment, find the appropriate yaml file (ie one for each segment)\n\t\t\t\tspecs = YAML.load_file(yamlfile)\t\t\t\t\t\t\t# load the yaml file\n\t\t\t\t\n\t\t\t puts \":: #{specs[\"Header\"][\"name\"]} (#{seg})\"\t\t\t# print the text eg \":: Message Header Segment (MSH)\"\n\t\t\t \n\t\t\t # then iterate over each field in the particular segment\n\t\t\t\tsegment.each_with_index do |field, index|\t\t\t\t\t# then for each field...\n\t\t\t\t\tif index > 0 then\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only if the index is 1 or more (ie the first value is not useful here)\n\t\t\t\t\t\tfld = \"#{seg}-#{index}\"\t\t\t\t\t\t\t\t\t # get the field id => \"PID-5\"\n\t\t\t\t\t\tprint \" #{fld}: \"\t\t\t\t\t\t \t\t\t\t\t\t# on each line print the particular field being queried eg \"PID-5: \"\n\t\t\t\t\t\tfldname = specs[fld][\"name\"]\t\t\t\t\t\t\t\t\t# get the name of the field from the yaml file\n\t\t\t\t\t\tprint \"#{fldname} => \"\t\t\t\t\t\t\t\t\t\t\t\t# print the field name after the field eg \"PID-5: Patient Name\"\n\t\t\t\t\t\tif field.class == String then\t\t\t\t\t\t\t\t\t# if the field class is a string...\n\t\t\t\t\t\t\tputs field\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# then just print (ie add) the value of the string eg \"PID-7: Date/Time of Birth => 19770621\"\n\t\t\t\t\t\telsif field.class == Array then\t\t\t\t\t\t\t\t# otherwise if the field is an array, ie there is lower level structure...\n\t\t\t\t\t\t\tputs field.inspect\t\t\t\t\t\t\t\t\t\t\t\t\t# then print the structure eg \"PID-5 Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\"\n\t\t\t\t\t\tend # << end if field...\n\t\t\t\t\tend # << end if index > 0 \n\t\t\t\tend\t # << end segment.each_with_index\n\t\t\t\tputs\t\n\t\t end\t # << end @parsed_content.each\n\t \n\t end",
"def segment\n return asset_on_segment_history.segment rescue nil\n end",
"def adjust_sp_binary()\n \"@SP\\nM=M-1\\n@SP\\nA=M\\nD=M\\n@SP\\nM=M-1\\nA=M\\n\"\n end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def last_line; end",
"def sn\n end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def profiling_command_end\n ptr = MemoryPointer::new( :cl_ulong )\n error = OpenCL.clGetEventProfilingInfo(self, PROFILING_COMMAND_END, ptr.size, ptr, nil )\n error_check(error)\n return ptr.read_cl_ulong\n end",
"def start_line\n attributes.fetch(:startLine)\n end",
"def end_character\n attributes.fetch(:endCharacter)\n end",
"def Segment\n A = 1\n B = 2\n C = 4\n D = 8\n E = 16\n F = 32\n G = 64\n H = 128\n J = 256\n K = 512\n M = 1024\n N = 2048\n P = 4096\n\n def initialize(segments=0)\n @segments = segments\n end\n\n def s(seg=Segment.new)\n Segment.new seg.segments + SOUTH\n end\nend",
"def tail_code_line(cm, start)\n \n iseq = cm.iseq\n \n i = start\n fin = cm.lines.last\n while i < fin\n line_no = cm.line_from_ip(i)\n return line_no if line_no > 0\n op = iseq[i]\n case op\n when OP_GOTO_IF_TRUE, OP_GOTO_IF_FALSE, OP_GOTO\n when OP_GOTO\n i = iseq[i+1]\n when OP_RET\n return -2\n when nil\n return -1\n else\n # Rubinius is getting an error here sometimes. Need to figure\n # out what's wrong.\n begin\n op = Rubinius::InstructionSet[op]\n rescue TypeError\n return -1\n end\n i += (op.arg_count + 1)\n end\n end\n return 0\n end",
"def cds_seq\n cds_length = self.coding_region_cdna_end - self.coding_region_cdna_start + 1\n \n return self.seq[(self.coding_region_cdna_start - 1), cds_length]\n end",
"def parse_footnote_definition; end",
"def span_start; end",
"def end_point; end",
"def get_stop_details(line)\nx = $subway_lines\n for i in x do\n if line == i[:line]\nline_details = i[:stops]\n end\nend\nline_details\nend",
"def find_page_end\n \n end",
"def last_line_in_view()\n\t\tcoord_to_line_num(contents_y + height)\n\tend",
"def parse_eob_marker; end",
"def line_after\n line[column..-1].to_s\n end",
"def end_pos=(_); end",
"def starting_position; end",
"def inspect\n \"#{@begin.inspect}#{@excl ? \"...\" : \"..\"}#{@end.inspect}\"\n end",
"def tag_end(*args)\n #puts \"#{@@idpath} #{args[0]}\"\n @@endCount +=1\n @@startCount=0\n @@idpath.pop\n @@curentid = @@path.pop\n end",
"def street_suffix; end",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def rl_end_of_line(count, key)\r\n @rl_point = @rl_end\r\n 0\r\n end",
"def to_s\n @segment\n end",
"def satellitenumber\n @line1[02...07].to_i\n end",
"def second_line\n \"#{self.town} #{self.district} #{self.region}\".squeeze(\" \").strip\n end"
] | [
"0.58320814",
"0.5785247",
"0.57065535",
"0.5654377",
"0.562356",
"0.559991",
"0.5580909",
"0.5546707",
"0.5503432",
"0.5459237",
"0.54423064",
"0.54423064",
"0.54371065",
"0.5424614",
"0.5419647",
"0.5414141",
"0.5395217",
"0.5395217",
"0.53616524",
"0.53274345",
"0.5325603",
"0.53132737",
"0.53091687",
"0.53027314",
"0.5294688",
"0.5294635",
"0.529343",
"0.5276805",
"0.5272987",
"0.5263512",
"0.5254533",
"0.5251431",
"0.52465",
"0.5228343",
"0.5226971",
"0.52215016",
"0.51984006",
"0.51970303",
"0.5189743",
"0.51884365",
"0.51864207",
"0.5180523",
"0.51736903",
"0.51682526",
"0.5162532",
"0.5162532",
"0.5147506",
"0.5141164",
"0.5120684",
"0.51187456",
"0.51016486",
"0.5101099",
"0.51007515",
"0.5095391",
"0.5087076",
"0.5084846",
"0.5078832",
"0.50678945",
"0.50621223",
"0.50576544",
"0.50499684",
"0.5036745",
"0.5034529",
"0.50237936",
"0.50233936",
"0.50223213",
"0.5014114",
"0.50069183",
"0.50069183",
"0.50069183",
"0.50069183",
"0.50069183",
"0.50069183",
"0.50069183",
"0.5005106",
"0.5003613",
"0.49976176",
"0.49952763",
"0.4975737",
"0.49689525",
"0.49672657",
"0.4947974",
"0.49454325",
"0.49396908",
"0.49360538",
"0.4934067",
"0.49318254",
"0.4929614",
"0.4928941",
"0.49244356",
"0.49242115",
"0.4922838",
"0.4903103",
"0.4896564",
"0.489036",
"0.488808",
"0.48759484",
"0.48748115",
"0.4859895",
"0.48588312",
"0.48574218"
] | 0.0 | -1 |
End of NM1_82 Segment Details Start of DTM_232 Segment Details | def claim_statement_period_start(*options)
claim_start_date = @classified_eob.get_date_for_netwrx(:start_date, @claim)
return nil if claim_start_date.nil?
return claim_start_date if @classified_eob.is_claim_eob? && claim_start_date.eql?('00000000')
claim_start_date if can_print_service_date(claim_start_date)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def segment_0; segments[0]; end",
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def segments; end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def read_end_stops()\n start_command('F81', false, @status_debug_msg)\n end",
"def zero_end\n @chr_end\n end",
"def segments\n if magic32?\n command(:LC_SEGMENT)\n else\n command(:LC_SEGMENT_64)\n end\n end",
"def stations_in_between2 (end_lane)\n if ($end_connection>$end)\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end+1...$end_connection]\n else\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end_connection+1...$end].reverse\n end #end of the if\n end",
"def satellitenumber\n @line1[02...07].to_i\n end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def end_line\n attributes.fetch(:endLine)\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def print_segments\n\t\t# METHOD: after all of the HL7 content has been parsed, print the contents of each segment in a more easily readible format\n\t\t# output for 1 segment looks like:\n\t\t\t\t\t#~ :: Segment: PID\n\t\t\t\t\t#~ PID-0: Segment => PID\n\t\t\t\t\t#~ PID-1: Set ID - PID => 1\n\t\t\t\t\t#~ PID-2: Patient ID => \n\t\t\t\t\t#~ PID-3: Patient Identifier List => [[\"P00057804\", \"\", \"\", \"\", \"PN\"], [\"4009887514\", \"\", \"\", \"AUSHIC\", \"MC\"], [\"SMIAL001\", \"\", \"\", \"\", \"PI\"]]\n\t\t\t\t\t#~ PID-4: Alternate Patient ID - PID => \n\t\t\t\t\t#~ PID-5: Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\n\t\t\t\t\t#~ PID-6: Mother’s Maiden Name => \n\t\t\t\t\t#~ PID-7: Date/Time of Birth => 19770621\n\t\t\t\t\t#~ PID-8: Sex => M\n\t\t\t\t\t#~ PID-9: Patient Alias => \n\t\t\t\t\t#~ PID-10: Race => \n\t\t\t\t\t#~ PID-11: Patient Address => [\"818 Beach Road\", \"\", \"BEECHMERE\", \"\", \"4510\", \"AU\", \"H\"]\n\n\t\t\t# iterate over each segment\n\t\t\t@parsed_content.each do |segment|\n\t\t\t\tseg = segment[0]\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # eg => \"PID\"\n\t\t\t\t\n\t\t\t\t#get yaml file details\n\t\t\t\tyamlfile = \"hl7specification/#{seg}\"\t\t\t\t\t# for each segment, find the appropriate yaml file (ie one for each segment)\n\t\t\t\tspecs = YAML.load_file(yamlfile)\t\t\t\t\t\t\t# load the yaml file\n\t\t\t\t\n\t\t\t puts \":: #{specs[\"Header\"][\"name\"]} (#{seg})\"\t\t\t# print the text eg \":: Message Header Segment (MSH)\"\n\t\t\t \n\t\t\t # then iterate over each field in the particular segment\n\t\t\t\tsegment.each_with_index do |field, index|\t\t\t\t\t# then for each field...\n\t\t\t\t\tif index > 0 then\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only if the index is 1 or more (ie the first value is not useful here)\n\t\t\t\t\t\tfld = \"#{seg}-#{index}\"\t\t\t\t\t\t\t\t\t # get the field id => \"PID-5\"\n\t\t\t\t\t\tprint \" #{fld}: \"\t\t\t\t\t\t \t\t\t\t\t\t# on each line print the particular field being queried eg \"PID-5: \"\n\t\t\t\t\t\tfldname = specs[fld][\"name\"]\t\t\t\t\t\t\t\t\t# get the name of the field from the yaml file\n\t\t\t\t\t\tprint \"#{fldname} => \"\t\t\t\t\t\t\t\t\t\t\t\t# print the field name after the field eg \"PID-5: Patient Name\"\n\t\t\t\t\t\tif field.class == String then\t\t\t\t\t\t\t\t\t# if the field class is a string...\n\t\t\t\t\t\t\tputs field\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# then just print (ie add) the value of the string eg \"PID-7: Date/Time of Birth => 19770621\"\n\t\t\t\t\t\telsif field.class == Array then\t\t\t\t\t\t\t\t# otherwise if the field is an array, ie there is lower level structure...\n\t\t\t\t\t\t\tputs field.inspect\t\t\t\t\t\t\t\t\t\t\t\t\t# then print the structure eg \"PID-5 Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\"\n\t\t\t\t\t\tend # << end if field...\n\t\t\t\t\tend # << end if index > 0 \n\t\t\t\tend\t # << end segment.each_with_index\n\t\t\t\tputs\t\n\t\t end\t # << end @parsed_content.each\n\t \n\t end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def start_line_number; end",
"def start_line_number; end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def coding_region_cdna_start\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.start_exon\n answer += self.translation.seq_start\n return answer\n else\n answer += exon.length\n end\n end\n \n end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def print_mc\n if @minimal_segment_count == segment_count+1\n print \"0\\n\";\n else\n print \"#{@minimal_segment_count}\\n\"\n for i in 0..segment_count-1\n if is_in_mc?(i)\n get_segment(i).to_s2\n end\n end\n end\n end",
"def segment_name()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.DetailedTimerComponentState_segment_name(@handle.ptr)\n result\n end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def end_pos; end",
"def end_pos; end",
"def sub_sector; end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def start_line_number=(_); end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def metadata_start\n 2\n end",
"def input_end_offset\n _response_word.fetch(\"endingPos\", nil)\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def sn\n end",
"def stop\n return self.seq_region_end\n end",
"def read_end_stops()\n @ramps_arduino.execute_command('F81', false, @status_debug_msg)\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def segments=(_arg0); end",
"def get_stop_details(line)\nx = $subway_lines\n for i in x do\n if line == i[:line]\nline_details = i[:stops]\n end\nend\nline_details\nend",
"def payment_advice_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FTX\\+AAG.*/)\n end",
"def edid_decode_dtd( bytes, offset )\n print \" \"\n pixel_clock = ((bytes[offset+1])<<8) + bytes[offset+0]\n if pixel_clock > 0\n # Detailed Timing Descriptor data\n puts \"Detailed Timing Descriptor: %u MHz\" % (pixel_clock / 100)\n else\n # Monitor Descriptor data\n descriptor_flag2 = bytes[offset+2] # reserved, should be 0\n descriptor_type = bytes[offset+3]\n descriptor_flag4 = bytes[offset+4] # reserved, should be 0\n data_start = offset+5\n data_end = offset+17\n case descriptor_type\n when 0xff\n puts \"Display Serial Number: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfe\n puts \"Unspecified Text: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfd\n puts \"Display Range Limits Descriptor\"\n when 0xfc\n puts \"Display Name: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfb\n puts \"Additional White Point Data\"\n when 0xfa\n puts \"Additional Standard Timing Identifiers\"\n when 0xf9\n puts \"Display Color Management (DCM) Descriptor\"\n when 0xf8\n puts \"CVT 3-byte Timing Codes\"\n when 0xf7\n puts \"Additional Standard Timing Descriptor\"\n when 0x10\n puts \"Dummy Descriptor\"\n when 0x00..0x0f # manufacturer reserved descriptors\n puts \"Manufacturer Reserved Descriptor (type 0x02x)\" % descriptor_type\n puts \" Data: %s\" % bytes[data_start..data_end].map{|b|\"%02x\"%b}.join(' ')\n else\n puts \"Undefined Monitor Descriptor (type 0x%02x)\" % descriptor_type\n puts \" Data: %s\" % bytes[data_start..data_end].map{|b|\"%02x\"%b}.join(' ')\n end\n end\nend",
"def print_known_cidr_blks_asce\n\t\tputs \"\\nIndex of known CIDR Net blocks in Ascending Order:\"\n\t\tputs @known_cidr_blks_asce_index\n\t\tputs \"End of the Index\"\n\tend",
"def find_stations(si, ei, line) # si is short for start index and ei is short for end index\n if ei >= si\n line_stop_count = ei - si\n passing_stations = line[(si+1)..ei].join(\", \")\n else\n line_stop_count = si - ei\n passing_stations = line[(ei)..(si-1)].reverse.join(\", \")\n end\n return {:num_stops => line_stop_count, :stops => passing_stations}\nend",
"def sub_hl\n end",
"def span_start; end",
"def output_ris_end\n \"\"\n end",
"def adjust_sp_binary()\n \"@SP\\nM=M-1\\n@SP\\nA=M\\nD=M\\n@SP\\nM=M-1\\nA=M\\n\"\n end",
"def end\n attributes.fetch(:end)\n end",
"def ld_hl_sp_plusr8\n end",
"def context_type_event_detailed_800_header\n ContextTypeDef.new(\n :event_detailed_800_header,\n [\n /\\s*(POS)\\s+(COGNOME E NOME)\\s+(NAZ)\\s+(\\d{2,4} m\\s+){8}\\s*(ARRIVO)/i,\n /\\s*SOCIETA'\\s+(ANNO)/i,\n /\\s*(800) *(m|metri)? *(stile|SL|ST)/i\n ]\n )\n end",
"def pos_rseg_header\n pos_page_body\n end",
"def strand; @data[8]; end",
"def gui_new_segno\n @net_controller.send_data_to_server( @net_controller.build_cmd(:gui_new_segno, \"\") )\n end",
"def pos_header\n @position - 2\n end",
"def starting_position; end",
"def part_of_head?\n return true if self.position == 0\n begin\n if self.nominal?\n # there are no non-nominal segments between given\n # segment and the beginning of its spelling\n gap = false\n self.spelling.segments.each do |prev_segment|\n break if prev_segment == self\n gap = true if !prev_segment.nominal?\n end\n !gap\n end\n rescue Exception => ex\n #puts ex\n false\n end\n end",
"def details\n response = cs_get \"/segments/#{segment_id}.json\", {}\n Hashie::Mash.new(response)\n end",
"def zero_start\n @chr_start - 1\n end",
"def show_program_h_offset\n\t\t\tputs \" Start of program headers: #{@elf_program_h_offset.to_h} (bytes into file)\"\n\t\tend",
"def Segment\n A = 1\n B = 2\n C = 4\n D = 8\n E = 16\n F = 32\n G = 64\n H = 128\n J = 256\n K = 512\n M = 1024\n N = 2048\n P = 4096\n\n def initialize(segments=0)\n @segments = segments\n end\n\n def s(seg=Segment.new)\n Segment.new seg.segments + SOUTH\n end\nend",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def start_line\n attributes.fetch(:startLine)\n end",
"def parse_eob_marker; end",
"def parse_footnote_marker; end",
"def read_trim_params\n # compensation parameter register mapping\n Calibration = Struct.new(\n # Register Address Register content Data type\n :dig_T1, # 0x88 / 0x89 dig_T1 [7:0] / [15:8] unsigned short\n :dig_T2, # 0x8A / 0x8B dig_T2 [7:0] / [15:8] signed short\n :dig_T3, # 0x8C / 0x8D dig_T3 [7:0] / [15:8] signed short\n :dig_P1, # 0x8E / 0x8F dig_P1 [7:0] / [15:8] unsigned short\n :dig_P2, # 0x90 / 0x91 dig_P2 [7:0] / [15:8] signed short\n :dig_P3, # 0x92 / 0x93 dig_P3 [7:0] / [15:8] signed short\n :dig_P4, # 0x94 / 0x95 dig_P4 [7:0] / [15:8] signed short\n :dig_P5, # 0x96 / 0x97 dig_P5 [7:0] / [15:8] signed short\n :dig_P6, # 0x98 / 0x99 dig_P6 [7:0] / [15:8] signed short\n :dig_P7, # 0x9A / 0x9B dig_P7 [7:0] / [15:8] signed short\n :dig_P8, # 0x9C / 0x9D dig_P8 [7:0] / [15:8] signed short\n :dig_P9, # 0x9E / 0x9F dig_P9 [7:0] / [15:8] signed short\n :dig_H1, # 0xA1 dig_H1 [7:0] unsigned char\n :dig_H2, # 0xE1 / 0xE2 dig_H2 [7:0] / [15:8] signed short\n :dig_H3, # 0xE3 dig_H3 [7:0] unsigned char\n :dig_H4, # 0xE4 / 0xE5[3:0] dig_H4 [11:4] / [3:0] signed short\n :dig_H5, # 0xE5[7:4] / 0xE6 dig_H5 [3:0] / [11:4] signed short\n :dig_H6, # 0xE7 dig_H6 signed char\n :t_fine\n )\n calib = [] \n\n # data addresses\n dig_t_reg = 0x88\n dig_p_reg = 0x8E\n dig_h_reg1 = 0xA1\n dig_h_reg2 = 0xE1\n \n data = read(dig_t_reg, 6)\n calib << ((data[1] << 8) | data[0]) # uint16_t dig_T1 [1][0] \n calib << int16(data[3], data[2]) # int16_t dig_T2 [3][2]\n calib << int16(data[5], data[4]) # int16_t dig_T3 [5][4]\n\n data = read(dig_p_reg, 18)\n calib << ((data[1] << 8) | data[0]) # uint16_t dig_P1 [1][0]\n calib << int16(data[3], data[2]) # int16_t dig_P2 [3][2]\n calib << int16(data[5], data[4]) # int16_t dig_P3 [5][4]\n calib << int16(data[7], data[6]) # int16_t dig_P4 [7][6]\n calib << int16(data[9], data[8]) # int16_t dig_P5 [9][8]\n calib << int16(data[11], data[10]) # int16_t dig_P6 [11][10]\n calib << int16(data[13], data[12]) # int16_t dig_P7 [13][12]\n calib << int16(data[15], data[14]) # int16_t dig_P8 [15][14]\n calib << int16(data[17], data[16]) # int16_t dig_P9 [17][16]\n\n data = read(dig_h_reg1, 1)\n calib << data[0] # uint8_t dig_H1 [0]\n \n data = read(dig_h_reg2, 7)\n calib << int16(data[1], data[0]) # int16_t dig_H2 [1],[0]\n calib << data[2] # uint8_t dig_H3 [2] \n\n # 109876543210 bit[11:0]\n # xxxxxxxx.... dig_H4_msb [11:4] [3]\n # ....xxxx dig_H4_lsb [3:0] [4]\n # xxxxxxxxxxxx dig_H4 [11:0] \n dig_H4_msb = (data[3] >> 4) & 0x0F\n dig_H4_lsb = ((data[3] << 4) & 0xF0) | (data[4] & 0x0F) \n calib << int16(dig_H4_msb, dig_H4_lsb) # int16_t dig_H4 [3][4]\n \n # 109876543210 bit[11:0]\n # xxxxxxxx.... dig_H5_msb [11:4] [5]\n # xxxx.... dig_H5_lsb [7:4] [4]\n # xxxxxxxxxxxx dig_H5 [11:0]\n dig_H5_msb = (data[5] >> 4) & 0x0F\n dig_H5_lsb = ((data[5] << 4) & 0xF0) | (data[4] >> 4) \n calib << int16(dig_H5_msb, dig_H5_lsb) # int16_t dig_H5 [4][5]\n \n calib << int8(data[6]) # int8_t dig_H6 [6]\n\n @calib = Calibration.new(*calib)\n end",
"def parse_moreinfo\n if @input.match?(/Tov.* ld\\.:\\n/)\n @ast[\"moreinfo\"] = @input.scan_until(/^\\n/)\n end\n end",
"def segment\n return asset_on_segment_history.segment rescue nil\n end",
"def to_s\n @segment\n end",
"def interchange_control_trailer\n iea_elements = []\n iea_elements << 'IEA'\n iea_elements << '1'\n iea_elements << (@isa_record.isa_number.to_s.rjust(9, '0') if @isa_record)\n iea_elements.join(@element_seperator)\n end",
"def error_segment\n segment(100)\n end",
"def visible_line_number\n @ev_height\n end",
"def first_offset; end",
"def first_offset; end",
"def calcSectionNo(startNo=1, range=0, size=0, dep=1, str='', outerStack)\n stack = outerStack #Stack.instance\n i = dep.to_i\n counter = 0\n numberStr = [[\"%\",i,counter],[\"%%\",i,counter],[\"%%%\",i,counter],\n [\"%%%%\",i,counter],[\"%%%%%\",i,counter],[\"%%%%%%\",i,counter]]\n number = \"\"\n headNo = size.to_i\n\n if (headNo > $MAX_H) || (headNo <= 0) then \n @@log.error(\"AoBane Syntax Error: Header shortage!\") \n raise SyntaxError,\"Headder shortage!\"\n else\n (1..headNo).each_with_index{|k| #h1 to h6\n p k\n if (k < headNo) then\n p \"+++\" # #{k},#{stack.sizeofStack}\"\n if k >= stack.size then\n stack.push(numberStr[k])\n end\n elsif k == headNo then\n p \"---\"\n if stack.size == 0 then\n stack.push(numberStr[k-1])\n end\n if stack.last[$S_SLOT].size > numberStr[k-1][$S_SLOT].size then\n loop do\n stack.pop\n if stack.last[$S_SLOT].size == numberStr[k-1][$S_SLOT].size then\n break\n end\n end\n end\n else\n p \"~~~~\"\n stack.push(numberStr[k])\n end #if...elsif \n }\n=begin\n else\n @@log.error(\"AoBane Syntax Error: Header Number Overflow!\")\n raise SyntaxError,\"Header Number Overflow!\"\n end #case\n=end\n end #if...else\n p \"$$$$\" \n number = \"\"\n stack.each { |item|\n if item == stack.last then\n item[$N_SLOT] += item[$C_SLOT]\n item[$C_SLOT] = 1\n end\n number << (item[$N_SLOT]).to_s + '.'\n @@log.debug number\n }\n \n h = \"#\"\n times = startNo.to_i + size.to_i - 1\n return h*times + number + str\nend",
"def end(p0) end",
"def interchange_control_trailer\n [ 'IEA', '1', (@isa_record.isa_number.to_s.rjust(9, '0') if @isa_record)].join(@element_seperator)\n end",
"def ipn_end_point; end",
"def interchange_control_trailer\n iea_elements = []\n iea_elements << 'IEA'\n iea_elements << '1'\n iea_elements << facility.lockbox_number.rjust(9, '0')\n iea_elements.join(@element_seperator)\n end",
"def line_number\n number[6..-1]\n end",
"def record_header_redundant_field_end_offsets(header, cursor)\n header.n_fields.times.map do |n|\n cursor.name(\"field_end_offset[#{n}]\") { cursor.read_uint_by_size(header.offset_size) }\n end\n end",
"def size_fseg_header\n 2 * Innodb::FsegEntry::SIZE\n end",
"def show_section_name_idx\n\t\t\t puts \" Section header string table index: #{@elf_section_name_idx}\"\n\t\tend",
"def size_rseg_header\n 4 + 4 + Innodb::List::BASE_NODE_SIZE + Innodb::FsegEntry::SIZE\n end",
"def end_pos=(_); end",
"def inspect_details\n\t\t\treturn %Q{FIN:%d RSV1:%d RSV2:%d RSV3:%d OPCODE:%s (0x%x) -- %0.2fK body} % [\n\t\t\t\tself.fin? ? 1 : 0,\n\t\t\t\tself.rsv1? ? 1 : 0,\n\t\t\t\tself.rsv2? ? 1 : 0,\n\t\t\t\tself.rsv3? ? 1 : 0,\n\t\t\t\tself.opcode,\n\t\t\t\tself.numeric_opcode,\n\t\t\t\t(self.payload.size / 1024.0),\n\t\t\t]\n\t\tend",
"def ipn_end_point=(_arg0); end",
"def cds_seq\n cds_length = self.coding_region_cdna_end - self.coding_region_cdna_start + 1\n \n return self.seq[(self.coding_region_cdna_start - 1), cds_length]\n end"
] | [
"0.5543945",
"0.5540212",
"0.5531124",
"0.54698503",
"0.5462701",
"0.54379207",
"0.541752",
"0.5398819",
"0.5388276",
"0.5382018",
"0.53728616",
"0.5352744",
"0.53500956",
"0.5344256",
"0.5343119",
"0.53359777",
"0.5304033",
"0.52917093",
"0.52869225",
"0.5284558",
"0.52713436",
"0.52713436",
"0.52622354",
"0.5257133",
"0.52528304",
"0.52428126",
"0.5236285",
"0.52032673",
"0.5195013",
"0.5172083",
"0.51590306",
"0.5157469",
"0.51410514",
"0.51276284",
"0.51276284",
"0.512224",
"0.51106435",
"0.5088733",
"0.50738907",
"0.5068345",
"0.50316006",
"0.5018549",
"0.5004111",
"0.5003507",
"0.5000453",
"0.4991395",
"0.49897745",
"0.49879605",
"0.49866685",
"0.49866685",
"0.49795878",
"0.49770218",
"0.4975678",
"0.49677172",
"0.49665454",
"0.49640355",
"0.49589363",
"0.49558422",
"0.4955488",
"0.4953945",
"0.49342233",
"0.49331588",
"0.49239862",
"0.49088603",
"0.4904947",
"0.49049315",
"0.49006447",
"0.48967335",
"0.48925945",
"0.48924062",
"0.4889556",
"0.48870164",
"0.48766372",
"0.4874818",
"0.486421",
"0.4852258",
"0.48468637",
"0.48451278",
"0.4842505",
"0.48167062",
"0.48154408",
"0.48055708",
"0.47989166",
"0.4797173",
"0.4796338",
"0.4785161",
"0.4785161",
"0.47834373",
"0.47807187",
"0.4780145",
"0.47729897",
"0.47724187",
"0.47712243",
"0.4770691",
"0.47675788",
"0.47670552",
"0.47659156",
"0.47630072",
"0.47604415",
"0.47591394",
"0.4754405"
] | 0.0 | -1 |
End of DTM_232 Segment Details Start of DTM_233 Segment Details | def claim_statement_period_end(*options)
claim_start_date = @classified_eob.get_date_for_netwrx(:end_date, @claim)
return nil if claim_start_date.nil?
return claim_start_date if @classified_eob.is_claim_eob? && claim_start_date.eql?('00000000')
claim_start_date if can_print_service_date(claim_start_date)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def street_segments\r\n\t\tstart_street_segments + end_street_segments\r\n\tend",
"def read_end_stops()\n execute_command('F81', false, @status_debug_msg)\n end",
"def segment_0; segments[0]; end",
"def findEndBuffer\n offset = @file_size - 4\n while EVENTRECORDLENGTH.decode(read_buffer(offset, 4, -1))[:record_length] == 0x27\n offset -= 4\n end\n offset + 4\n end",
"def b_offset\n @last_4[1].to_i\n end",
"def segments; end",
"def structure_lines(lines)\n last_structure_line = lines.index{ |line| line =~ /\\AM END/ }\n lines[4..last_structure_line].join\n end",
"def zero_end\n @chr_end\n end",
"def coding_region_cdna_end\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.end_exon\n answer += self.translation.seq_end\n return answer\n else\n answer += exon.length\n end\n end\n end",
"def print_segments\n\t\t# METHOD: after all of the HL7 content has been parsed, print the contents of each segment in a more easily readible format\n\t\t# output for 1 segment looks like:\n\t\t\t\t\t#~ :: Segment: PID\n\t\t\t\t\t#~ PID-0: Segment => PID\n\t\t\t\t\t#~ PID-1: Set ID - PID => 1\n\t\t\t\t\t#~ PID-2: Patient ID => \n\t\t\t\t\t#~ PID-3: Patient Identifier List => [[\"P00057804\", \"\", \"\", \"\", \"PN\"], [\"4009887514\", \"\", \"\", \"AUSHIC\", \"MC\"], [\"SMIAL001\", \"\", \"\", \"\", \"PI\"]]\n\t\t\t\t\t#~ PID-4: Alternate Patient ID - PID => \n\t\t\t\t\t#~ PID-5: Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\n\t\t\t\t\t#~ PID-6: Mother’s Maiden Name => \n\t\t\t\t\t#~ PID-7: Date/Time of Birth => 19770621\n\t\t\t\t\t#~ PID-8: Sex => M\n\t\t\t\t\t#~ PID-9: Patient Alias => \n\t\t\t\t\t#~ PID-10: Race => \n\t\t\t\t\t#~ PID-11: Patient Address => [\"818 Beach Road\", \"\", \"BEECHMERE\", \"\", \"4510\", \"AU\", \"H\"]\n\n\t\t\t# iterate over each segment\n\t\t\t@parsed_content.each do |segment|\n\t\t\t\tseg = segment[0]\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # eg => \"PID\"\n\t\t\t\t\n\t\t\t\t#get yaml file details\n\t\t\t\tyamlfile = \"hl7specification/#{seg}\"\t\t\t\t\t# for each segment, find the appropriate yaml file (ie one for each segment)\n\t\t\t\tspecs = YAML.load_file(yamlfile)\t\t\t\t\t\t\t# load the yaml file\n\t\t\t\t\n\t\t\t puts \":: #{specs[\"Header\"][\"name\"]} (#{seg})\"\t\t\t# print the text eg \":: Message Header Segment (MSH)\"\n\t\t\t \n\t\t\t # then iterate over each field in the particular segment\n\t\t\t\tsegment.each_with_index do |field, index|\t\t\t\t\t# then for each field...\n\t\t\t\t\tif index > 0 then\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only if the index is 1 or more (ie the first value is not useful here)\n\t\t\t\t\t\tfld = \"#{seg}-#{index}\"\t\t\t\t\t\t\t\t\t # get the field id => \"PID-5\"\n\t\t\t\t\t\tprint \" #{fld}: \"\t\t\t\t\t\t \t\t\t\t\t\t# on each line print the particular field being queried eg \"PID-5: \"\n\t\t\t\t\t\tfldname = specs[fld][\"name\"]\t\t\t\t\t\t\t\t\t# get the name of the field from the yaml file\n\t\t\t\t\t\tprint \"#{fldname} => \"\t\t\t\t\t\t\t\t\t\t\t\t# print the field name after the field eg \"PID-5: Patient Name\"\n\t\t\t\t\t\tif field.class == String then\t\t\t\t\t\t\t\t\t# if the field class is a string...\n\t\t\t\t\t\t\tputs field\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# then just print (ie add) the value of the string eg \"PID-7: Date/Time of Birth => 19770621\"\n\t\t\t\t\t\telsif field.class == Array then\t\t\t\t\t\t\t\t# otherwise if the field is an array, ie there is lower level structure...\n\t\t\t\t\t\t\tputs field.inspect\t\t\t\t\t\t\t\t\t\t\t\t\t# then print the structure eg \"PID-5 Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\"\n\t\t\t\t\t\tend # << end if field...\n\t\t\t\t\tend # << end if index > 0 \n\t\t\t\tend\t # << end segment.each_with_index\n\t\t\t\tputs\t\n\t\t end\t # << end @parsed_content.each\n\t \n\t end",
"def multi_end2(ttls)\n rpls = ''\n ttl = @tg_end.size-1\n ttl = ttls-1 if ttls\n ttl.downto(0) do |i|\n sz = @tg_end[i][/^ +/].to_s.size\n if ttls || @spc.size <= sz\n send = @tg_end.pop\n if send.strip[0,5]==\"!run!\"\n scrpt = send.gsub(\"\\n\",\"\\n#{@spc}\").split(\"\\n\")\n @doc_src = scrpt[1,99]+@doc_src\n else\n spc = send[/(^[ \\t]*)/,1].to_s\n rpls << (send.gsub(\"\\n\",\"\\n#{spc}\") + \"\\n\") \n end\n end\n end\n p \"End2 : #{rpls}\" if @dbg[:parse] && rpls!= ''\n rpls\n end",
"def parse835(first_line) read_header(first_line).read_parties.read_details.read_adjustments.read_trailer; end",
"def extract_startend(params)\n params[\"pio_startT\"] = ((params[\"pio_startT\"].to_r) * 1000).round(0).to_s if params[\"pio_startT\"]\n params[\"pio_endT\"] = ((params[\"pio_endT\"].to_r) * 1000).round(0).to_s if params[\"pio_endT\"]\n end",
"def read_end_stops()\n start_command('F81', false, @status_debug_msg)\n end",
"def stations_in_between2 (end_lane)\n if ($end_connection>$end)\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end+1...$end_connection]\n else\n puts \"then the stations to follow are:\"\n puts $mta[end_lane][$end_connection+1...$end].reverse\n end #end of the if\n end",
"def pos_fseg_header\n pos_index_header + size_index_header\n end",
"def get_main_section(string)\n string.split(\"[End of section] \\n\")[2]\n end",
"def rseg_header\n cursor(pos_rseg_header).name(\"rseg_header\") do |c|\n {\n :max_size => c.name(\"max_size\") { c.get_uint32 },\n :history_size => c.name(\"history_size\") { c.get_uint32 },\n :history_list => c.name(\"history_list\") {\n Innodb::List::History.new(@space, Innodb::List.get_base_node(c))\n },\n :fseg => c.name(\"fseg\") { Innodb::FsegEntry.get_inode(@space, c) },\n }\n end\n end",
"def edid_decode_dtd( bytes, offset )\n print \" \"\n pixel_clock = ((bytes[offset+1])<<8) + bytes[offset+0]\n if pixel_clock > 0\n # Detailed Timing Descriptor data\n puts \"Detailed Timing Descriptor: %u MHz\" % (pixel_clock / 100)\n else\n # Monitor Descriptor data\n descriptor_flag2 = bytes[offset+2] # reserved, should be 0\n descriptor_type = bytes[offset+3]\n descriptor_flag4 = bytes[offset+4] # reserved, should be 0\n data_start = offset+5\n data_end = offset+17\n case descriptor_type\n when 0xff\n puts \"Display Serial Number: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfe\n puts \"Unspecified Text: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfd\n puts \"Display Range Limits Descriptor\"\n when 0xfc\n puts \"Display Name: %s\" % bytes[data_start..data_end].map{|c|\"%c\"%c}.join.split(\"\\n\")\n when 0xfb\n puts \"Additional White Point Data\"\n when 0xfa\n puts \"Additional Standard Timing Identifiers\"\n when 0xf9\n puts \"Display Color Management (DCM) Descriptor\"\n when 0xf8\n puts \"CVT 3-byte Timing Codes\"\n when 0xf7\n puts \"Additional Standard Timing Descriptor\"\n when 0x10\n puts \"Dummy Descriptor\"\n when 0x00..0x0f # manufacturer reserved descriptors\n puts \"Manufacturer Reserved Descriptor (type 0x02x)\" % descriptor_type\n puts \" Data: %s\" % bytes[data_start..data_end].map{|b|\"%02x\"%b}.join(' ')\n else\n puts \"Undefined Monitor Descriptor (type 0x%02x)\" % descriptor_type\n puts \" Data: %s\" % bytes[data_start..data_end].map{|b|\"%02x\"%b}.join(' ')\n end\n end\nend",
"def end_line\n attributes.fetch(:endLine)\n end",
"def show_section_h_offset\n\t\t\tputs \" Start of section headers: #{@elf_section_h_offset} (bytes into file)\"\n\t\tend",
"def start_line_number; end",
"def start_line_number; end",
"def satellitenumber\n @line1[02...07].to_i\n end",
"def end_pos; end",
"def end_pos; end",
"def read_end_data\n _app, data = File.read(file).split(/^__END__$/, 2)\n data || ''\n end",
"def segments\n if magic32?\n command(:LC_SEGMENT)\n else\n command(:LC_SEGMENT_64)\n end\n end",
"def stop\n return self.seq_region_end\n end",
"def print_known_cidr_blks_desc\n\t\tputs \"\\nIndex of known CIDR Net blocks in Descendant Order:\"\n\t\tputs @known_cidr_blks_desc_index\n\t\tputs \"End of the Index\"\n\tend",
"def sub_sector; end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def segment_ends_s\n [from_end.to_s, to_end.to_s].join(\"---\")\n end",
"def getTripString(line, startStop, endStop) \n lineArray = getLine(line)\n string = \"\" # to save the station \n start_point = lineArray.index(startStop) # save the index of start point\n end_point = lineArray.index(endStop) # save the index of end point\n # p start_point \n # p end_point\n if start_point > end_point\n start_point.downto(end_point) do |j| \n string += \"#{lineArray[j]}, \"\n end\n else\n start_point.upto(end_point) do |j|\n string += \"#{lineArray[j]}, \"\n end \n end \n return string[0...-2] \nend",
"def nontrivial_end_line\n if successor\n successor.line_numbers.begin - 1\n else\n @document.last_non_empty_line\n end\n end",
"def read_end_stops()\n @ramps_arduino.execute_command('F81', false, @status_debug_msg)\n end",
"def segment_name()\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.DetailedTimerComponentState_segment_name(@handle.ptr)\n result\n end",
"def strand; @data[8]; end",
"def input_end_offset\n _response_word.fetch(\"endingPos\", nil)\n end",
"def metadata_start\n 2\n end",
"def ld_hl_sp_plusr8\n end",
"def normal_data_start(text)\n text.index(\"\\1\\n\", 2) + 2\n end",
"def coding_region_cdna_start\n answer = 0\n \n self.exons.each do |exon|\n if exon == self.translation.start_exon\n answer += self.translation.seq_start\n return answer\n else\n answer += exon.length\n end\n end\n \n end",
"def health_remark_code_segments\n @eob = service.insurance_payment_eob\n facility = @eob.check_information.job.batch.facility\n health_remark_code_segments = []\n health_remark_code_segments << compute_lq(\"in\")\n health_remark_code_segments << compute_lq(\"out\") \n if facility.details[:interest_in_service_line] && service.interest_service_line?\n health_remark_code_segments << lq_rx_segments(\"109975\") if @eob.claim_interest.to_f > 0.0 \n end \n health_remark_code_segments << lq_rx_segments(\"109702\") if @eob.hcra.to_f > 0.0\n health_remark_code_segments.compact!\n health_remark_code_segments.flatten\n end",
"def metadata_end(text)\n text.index(\"\\1\\n\", 2)\n end",
"def head\n self.segments.to_a.find{|t| t.nominal}\n end",
"def start_line_number=(_); end",
"def coding_region_genomic_end\n strand = self.translation.start_exon.seq_region_strand\n if strand == 1\n return self.translation.end_exon.seq_region_start + ( self.translation.seq_end - 1 )\n else\n return self.translation.start_exon.seq_region_end - ( self.translation.seq_start - 1 )\n end\n end",
"def output_ris_end\n \"\"\n end",
"def span_start; end",
"def sub_hl\n end",
"def detail\n \"#{line}\\n#{' ' * line_offset}^\"\n end",
"def entry_ended_pos\n @splitter.entry_ended_pos\n end",
"def starting_position; end",
"def interchange_control_trailer\n iea_elements = []\n iea_elements << 'IEA'\n iea_elements << '1'\n iea_elements << facility.lockbox_number.rjust(9, '0')\n iea_elements.join(@element_seperator)\n end",
"def end\n attributes.fetch(:end)\n end",
"def segments=(_arg0); end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def interchange_control_trailer\n iea_elements = []\n iea_elements << 'IEA'\n iea_elements << '1'\n iea_elements << (@isa_record.isa_number.to_s.rjust(9, '0') if @isa_record)\n iea_elements.join(@element_seperator)\n end",
"def print_mc\n if @minimal_segment_count == segment_count+1\n print \"0\\n\";\n else\n print \"#{@minimal_segment_count}\\n\"\n for i in 0..segment_count-1\n if is_in_mc?(i)\n get_segment(i).to_s2\n end\n end\n end\n end",
"def show_program_h_offset\n\t\t\tputs \" Start of program headers: #{@elf_program_h_offset.to_h} (bytes into file)\"\n\t\tend",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def payment_advice_segment_index(segments, start_pos, end_pos=nil)\n find_index_by_regex(segments, start_pos, end_pos, /^FTX\\+AAG.*/)\n end",
"def zero_start\n @chr_start - 1\n end",
"def record_header_redundant_field_end_offsets(header, cursor)\n header.n_fields.times.map do |n|\n cursor.name(\"field_end_offset[#{n}]\") { cursor.read_uint_by_size(header.offset_size) }\n end\n end",
"def interchange_control_trailer\n [ 'IEA', '1', (@isa_record.isa_number.to_s.rjust(9, '0') if @isa_record)].join(@element_seperator)\n end",
"def Segment\n A = 1\n B = 2\n C = 4\n D = 8\n E = 16\n F = 32\n G = 64\n H = 128\n J = 256\n K = 512\n M = 1024\n N = 2048\n P = 4096\n\n def initialize(segments=0)\n @segments = segments\n end\n\n def s(seg=Segment.new)\n Segment.new seg.segments + SOUTH\n end\nend",
"def error_segment\n segment(100)\n end",
"def to_s\n @segment\n end",
"def context_type_event_detailed_800_header\n ContextTypeDef.new(\n :event_detailed_800_header,\n [\n /\\s*(POS)\\s+(COGNOME E NOME)\\s+(NAZ)\\s+(\\d{2,4} m\\s+){8}\\s*(ARRIVO)/i,\n /\\s*SOCIETA'\\s+(ANNO)/i,\n /\\s*(800) *(m|metri)? *(stile|SL|ST)/i\n ]\n )\n end",
"def get_stop_details(line)\nx = $subway_lines\n for i in x do\n if line == i[:line]\nline_details = i[:stops]\n end\nend\nline_details\nend",
"def end_pos=(_); end",
"def first_offset; end",
"def first_offset; end",
"def segment_index \n if object.trip.previous_trip\n return 1\n else\n return 0\n end\n end",
"def read_trim_params\n # compensation parameter register mapping\n Calibration = Struct.new(\n # Register Address Register content Data type\n :dig_T1, # 0x88 / 0x89 dig_T1 [7:0] / [15:8] unsigned short\n :dig_T2, # 0x8A / 0x8B dig_T2 [7:0] / [15:8] signed short\n :dig_T3, # 0x8C / 0x8D dig_T3 [7:0] / [15:8] signed short\n :dig_P1, # 0x8E / 0x8F dig_P1 [7:0] / [15:8] unsigned short\n :dig_P2, # 0x90 / 0x91 dig_P2 [7:0] / [15:8] signed short\n :dig_P3, # 0x92 / 0x93 dig_P3 [7:0] / [15:8] signed short\n :dig_P4, # 0x94 / 0x95 dig_P4 [7:0] / [15:8] signed short\n :dig_P5, # 0x96 / 0x97 dig_P5 [7:0] / [15:8] signed short\n :dig_P6, # 0x98 / 0x99 dig_P6 [7:0] / [15:8] signed short\n :dig_P7, # 0x9A / 0x9B dig_P7 [7:0] / [15:8] signed short\n :dig_P8, # 0x9C / 0x9D dig_P8 [7:0] / [15:8] signed short\n :dig_P9, # 0x9E / 0x9F dig_P9 [7:0] / [15:8] signed short\n :dig_H1, # 0xA1 dig_H1 [7:0] unsigned char\n :dig_H2, # 0xE1 / 0xE2 dig_H2 [7:0] / [15:8] signed short\n :dig_H3, # 0xE3 dig_H3 [7:0] unsigned char\n :dig_H4, # 0xE4 / 0xE5[3:0] dig_H4 [11:4] / [3:0] signed short\n :dig_H5, # 0xE5[7:4] / 0xE6 dig_H5 [3:0] / [11:4] signed short\n :dig_H6, # 0xE7 dig_H6 signed char\n :t_fine\n )\n calib = [] \n\n # data addresses\n dig_t_reg = 0x88\n dig_p_reg = 0x8E\n dig_h_reg1 = 0xA1\n dig_h_reg2 = 0xE1\n \n data = read(dig_t_reg, 6)\n calib << ((data[1] << 8) | data[0]) # uint16_t dig_T1 [1][0] \n calib << int16(data[3], data[2]) # int16_t dig_T2 [3][2]\n calib << int16(data[5], data[4]) # int16_t dig_T3 [5][4]\n\n data = read(dig_p_reg, 18)\n calib << ((data[1] << 8) | data[0]) # uint16_t dig_P1 [1][0]\n calib << int16(data[3], data[2]) # int16_t dig_P2 [3][2]\n calib << int16(data[5], data[4]) # int16_t dig_P3 [5][4]\n calib << int16(data[7], data[6]) # int16_t dig_P4 [7][6]\n calib << int16(data[9], data[8]) # int16_t dig_P5 [9][8]\n calib << int16(data[11], data[10]) # int16_t dig_P6 [11][10]\n calib << int16(data[13], data[12]) # int16_t dig_P7 [13][12]\n calib << int16(data[15], data[14]) # int16_t dig_P8 [15][14]\n calib << int16(data[17], data[16]) # int16_t dig_P9 [17][16]\n\n data = read(dig_h_reg1, 1)\n calib << data[0] # uint8_t dig_H1 [0]\n \n data = read(dig_h_reg2, 7)\n calib << int16(data[1], data[0]) # int16_t dig_H2 [1],[0]\n calib << data[2] # uint8_t dig_H3 [2] \n\n # 109876543210 bit[11:0]\n # xxxxxxxx.... dig_H4_msb [11:4] [3]\n # ....xxxx dig_H4_lsb [3:0] [4]\n # xxxxxxxxxxxx dig_H4 [11:0] \n dig_H4_msb = (data[3] >> 4) & 0x0F\n dig_H4_lsb = ((data[3] << 4) & 0xF0) | (data[4] & 0x0F) \n calib << int16(dig_H4_msb, dig_H4_lsb) # int16_t dig_H4 [3][4]\n \n # 109876543210 bit[11:0]\n # xxxxxxxx.... dig_H5_msb [11:4] [5]\n # xxxx.... dig_H5_lsb [7:4] [4]\n # xxxxxxxxxxxx dig_H5 [11:0]\n dig_H5_msb = (data[5] >> 4) & 0x0F\n dig_H5_lsb = ((data[5] << 4) & 0xF0) | (data[4] >> 4) \n calib << int16(dig_H5_msb, dig_H5_lsb) # int16_t dig_H5 [4][5]\n \n calib << int8(data[6]) # int8_t dig_H6 [6]\n\n @calib = Calibration.new(*calib)\n end",
"def pan_last4\r\n params['panLast4']\r\n end",
"def print_known_cidr_blks_asce\n\t\tputs \"\\nIndex of known CIDR Net blocks in Ascending Order:\"\n\t\tputs @known_cidr_blks_asce_index\n\t\tputs \"End of the Index\"\n\tend",
"def edid_decode_extension( bytes, offset )\n extension_tag = bytes[offset+0]\n extension_revision = bytes[offset+1]\n extension_checksum = bytes[offset+127]\n puts\n case extension_tag\n when 0x00\n puts \"Timing Extension\"\n puts \" Extension revision %u\" % extension_revision\n when 0x01\n puts \"LCD Timings Extension\"\n puts \" Extension revision %u\" % extension_revision\n when 0x02 # CTA EDID Timing Extension\n puts \"CTA EDID Additional Timing Data Extension\"\n puts \" Revision %u\" % extension_revision\n edid_decode_cta_block( bytes, offset, \" \" )\n when 0x10\n puts \"Video Timing Block\"\n puts \"Extension revision %u\" % extension_revision\n when 0x20\n puts \"EDID 2.0 Extension\"\n puts \"Extension revision %u\" % extension_revision\n when 0x30\n puts \"Color information type 0\"\n puts \"Extension revision %u\" % extension_revision\n when 0x40 # VESA standard has this as DVI feature data\n puts \"Display Information Extension (DI-EXT)\"\n when 0x50 # VESA standard has this as Touch screen data\n puts \"Localized String Extension (LS-EXT)\"\n when 0x60\n puts \"Microdisplay Interface Extension (MI-EXT)\"\n when 0x70\n puts \"Display ID Extension\"\n when 0xa7, 0xaf, 0xbf\n puts \"Display Transfer Characteristics Data Block (DTCDB)\"\n when 0xf0\n puts \"EDID Block Map\"\n when 0xff\n puts \"Manufacturer Defined Extension\"\n puts \" Data: %s\" % extension_data.map{|b|\"%02x \"%b}.join\n else\n puts \"Undefined Extension Type 0x%02x\" % extension_tag\n puts \" Data: %s\" % extension_data.map{|b|\"%02x \"%b}.join\n end\nend",
"def offset; end",
"def offset; end",
"def offset; end",
"def end_tag\n \"#TESTLAB-END-#{self.bridge.to_s.upcase}\"\n end",
"def sn\n end",
"def cds_seq\n cds_length = self.coding_region_cdna_end - self.coding_region_cdna_start + 1\n \n return self.seq[(self.coding_region_cdna_start - 1), cds_length]\n end",
"def details\n response = cs_get \"/segments/#{segment_id}.json\", {}\n Hashie::Mash.new(response)\n end",
"def find_page_end\n \n end",
"def test_hash_end_and_start\n\t\ttest_array1 = '2|abb2|George>Amina(16):Henry>James(4):Henry>Cyrus(17):Henry>Kublai(4):George>Rana(1):SYSTEM>Wu(100)|1518892051.753197000|c72d'.split('|').map(&:chomp)\n\t\ttest_array2 = '3|c72d|SYSTEM>Henry(100)|1518892051.764563000|7419'.split('|').map(&:chomp)\n\t\tassert_equal test_array2[1], test_array1[4]\n\tend",
"def stopped_at; end",
"def stopped_at; end",
"def stopped_at; end",
"def segment\n return asset_on_segment_history.segment rescue nil\n end",
"def finish\n found = entries.reverse.find do |en|\n en['description'] and en['description'].match( DYNAMIC_FINISH_LINE )\n end\n return $2.to_i if found\n # FIX: Should 'finish' ever return a db-field in the Toggl module?\n read_attribute( :finish ) || start\n end",
"def test_855\n want = <<~EDI.gsub(/\\n/, \"\")\n ST*855*0001~\n BAK*00*AC*00000007397108*19700101~\n N1*ST*Sweeney Todd~\n N3*2705 Fleet St~\n N4*Birmingham*AL*35226*US~\n PO1*1*1*EA*59.95**UP*860001662184*VP*860001662184~\n PID*F****CBD Topical Cream 400mg THC Free~\n ACK*IA*1*EA****UP*860001662184*VP*860001662184~\n PO1*2*2*EA*49.95**UP*860001662108*VP*860001662108~\n PID*F****500mg Full Spectrum Garden Mint Oil Tincture~\n ACK*IA*2*EA****UP*860001662108*VP*860001662108~\n CTT*2~\n SE*13*0001\n EDI\n store = Eddy::Data::Store.new(time: @epoch)\n ts = Eddy::TransactionSets::TS855::TS.new(store)\n ts.BAK do |bak|\n bak.TransactionSetPurposeCode = \"00\"\n bak.AcknowledgmentType = \"AC\"\n bak.PurchaseOrderNumber = \"00000007397108\"\n bak.Date = @epoch\n end\n ts.L_N1 do |rep|\n # N1\n rep.N1.EntityIdentifierCode = \"ST\"\n rep.N1.Name = \"Sweeney Todd\"\n # N3\n rep.N3.AddressInformation1 = \"2705 Fleet St\"\n # N4\n rep.N4.CityName = \"Birmingham\"\n rep.N4.StateOrProvinceCode = \"AL\"\n rep.N4.PostalCode = \"35226\"\n rep.N4.CountryCode = \"US\"\n end\n ts.L_PO1 do |rep|\n rep.PO1.AssignedIdentification = \"1\"\n rep.PO1.QuantityOrdered = 1\n rep.PO1.UnitOrBasisForMeasurementCode = \"EA\"\n rep.PO1.UnitPrice = 59.95\n rep.PO1.ProductServiceIdQualifier1 = \"UP\"\n rep.PO1.ProductServiceId1 = \"860001662184\"\n rep.PO1.ProductServiceIdQualifier2 = \"VP\"\n rep.PO1.ProductServiceId2 = \"860001662184\"\n rep.L_PID do |rep|\n rep.PID.ItemDescriptionType = \"F\"\n rep.PID.Description = \"CBD Topical Cream 400mg THC Free\"\n end\n rep.L_ACK do |rep|\n rep.ACK.LineItemStatusCode = \"IA\"\n rep.ACK.Quantity = 1\n rep.ACK.UnitOrBasisForMeasurementCode = \"EA\"\n rep.ACK.ACK07 = \"UP\"\n rep.ACK.ACK08 = \"860001662184\"\n rep.ACK.ACK09 = \"VP\"\n rep.ACK.ACK10 = \"860001662184\"\n end\n end\n ts.L_PO1 do |rep|\n rep.PO1.AssignedIdentification = \"2\"\n rep.PO1.QuantityOrdered = 2\n rep.PO1.UnitOrBasisForMeasurementCode = \"EA\"\n rep.PO1.UnitPrice = 49.95\n rep.PO1.ProductServiceIdQualifier1 = \"UP\"\n rep.PO1.ProductServiceId1 = \"860001662108\"\n rep.PO1.ProductServiceIdQualifier2 = \"VP\"\n rep.PO1.ProductServiceId2 = \"860001662108\"\n rep.L_PID do |rep|\n rep.PID.ItemDescriptionType = \"F\"\n rep.PID.Description = \"500mg Full Spectrum Garden Mint Oil Tincture\"\n end\n rep.L_ACK do |rep|\n rep.ACK.LineItemStatusCode = \"IA\"\n rep.ACK.Quantity = 2\n rep.ACK.UnitOrBasisForMeasurementCode = \"EA\"\n rep.ACK.ACK07 = \"UP\"\n rep.ACK.ACK08 = \"860001662108\"\n rep.ACK.ACK09 = \"VP\"\n rep.ACK.ACK10 = \"860001662108\"\n end\n end\n ts.CTT.NumberOfLineItems = 2\n result = ts.render()\n assert_equal(want, result)\n end",
"def pos_rseg_header\n pos_page_body\n end",
"def lq_rx_segments(code)\n lq_rx_segments = []\n lq_rx_segments << 'LQ'\n lq_rx_segments << 'RX'\n lq_rx_segments << (code.blank? ? '' : code.strip)\n Output835.trim_segment(lq_rx_segments)\n lq_rx_segments.join(@element_seperator)\n end",
"def visible_line_number\n @ev_height\n end",
"def seek_section_head\n @psd_file.seek(@parser.color_mode_data.termination_pos, IO::SEEK_SET)\n end",
"def show_section_name_idx\n\t\t\t puts \" Section header string table index: #{@elf_section_name_idx}\"\n\t\tend",
"def find_stations(si, ei, line) # si is short for start index and ei is short for end index\n if ei >= si\n line_stop_count = ei - si\n passing_stations = line[(si+1)..ei].join(\", \")\n else\n line_stop_count = si - ei\n passing_stations = line[(ei)..(si-1)].reverse.join(\", \")\n end\n return {:num_stops => line_stop_count, :stops => passing_stations}\nend",
"def sld; end"
] | [
"0.5510913",
"0.5483743",
"0.5474959",
"0.54597807",
"0.5455413",
"0.5451109",
"0.5449231",
"0.54243255",
"0.54228336",
"0.5385733",
"0.5356914",
"0.53561366",
"0.5348496",
"0.53478235",
"0.5344739",
"0.53081435",
"0.5260658",
"0.5235439",
"0.52218175",
"0.52101743",
"0.5202483",
"0.5197808",
"0.5197808",
"0.51968044",
"0.5171777",
"0.5171777",
"0.5158758",
"0.5152225",
"0.51513773",
"0.5144454",
"0.51018184",
"0.5100906",
"0.5100906",
"0.5097892",
"0.5093474",
"0.5080154",
"0.50654453",
"0.5060694",
"0.5058066",
"0.50550604",
"0.5040875",
"0.50396836",
"0.50377864",
"0.502373",
"0.5020241",
"0.50141376",
"0.5010343",
"0.5003313",
"0.49884492",
"0.49852422",
"0.49775568",
"0.49732438",
"0.497196",
"0.49695686",
"0.4966092",
"0.4956149",
"0.49343964",
"0.49192485",
"0.49153233",
"0.49132094",
"0.49122125",
"0.49032432",
"0.48860124",
"0.48824346",
"0.48743072",
"0.48730406",
"0.48720208",
"0.48603225",
"0.4859411",
"0.48537585",
"0.4852049",
"0.4850488",
"0.4841881",
"0.4841881",
"0.48386902",
"0.48357463",
"0.48348346",
"0.48284104",
"0.48221025",
"0.48193282",
"0.48193282",
"0.48193282",
"0.48176536",
"0.4817038",
"0.48143926",
"0.48101008",
"0.48064578",
"0.48049065",
"0.48023129",
"0.48023129",
"0.48023129",
"0.48019433",
"0.47984055",
"0.47941417",
"0.47938693",
"0.47899967",
"0.47885293",
"0.47826165",
"0.47798115",
"0.4772353",
"0.47720873"
] | 0.0 | -1 |
End of DTM_233 Segment Details Client Specific Helper Methods End of Client Specific Helper Methods Conditions to print a Segment : Overwritten Methods | def verify_ts3_condition
Unified835Output::BenignNull.new
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def end_of_section\n\t$report_file.puts(\"-------END OF SECTION-------\")\n\t$report_file.puts\nend",
"def end_section\n end",
"def end; end",
"def end; end",
"def end; end",
"def end\n end",
"def end\n end",
"def end\n end",
"def end\n end",
"def Com6 # Recuperation Info\n \n end",
"def print_end_header()\n puts \"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\"\nend",
"def end_point; end",
"def main_end ; end",
"def at_end; end",
"def end() end",
"def diagnostic; end",
"def ends\n form_print(ends_at)\n end",
"def ends\n form_print(ends_at)\n end",
"def generate_eobs\n Output835.log.info \"\\n\\nPatient account number : #{@eob.patient_account_number}\"\n Output835.log.info \"This EOB has #{@services.length} service lines\"\n Output835.log.info \"This is a CLAIM LEVEL EOB\" if @is_claim_eob\n claim_segments = [claim_payment_loop, include_claim_dates]\n claim_segments << claim_supplemental_info unless @facility.details[:interest_in_service_line]\n claim_segments << claim_level_allowed_amount\n claim_segments << standard_industry_code_segments(@eob, @is_claim_eob) if @is_claim_eob\n claim_segments << service_payment_info_loop unless @is_claim_eob\n update_clp! claim_segments\n claim_segments.flatten.compact\n end",
"def paragraph_end; end",
"def epilogue\r\n end",
"def print_reservations\n puts \"We do not share our clients info!\"\n puts \"At least for free....\"\n end",
"def print_end\n print_separator_strong\n puts \"GAME OVER!\"\n print_separator_strong\n end",
"def _end!; end",
"def sub_sector; end",
"def on_stop\n puts\n puts '** Thank you for using SequenceServer :).'\n puts ' Please cite: '\n puts ' Priyam A, Woodcroft BJ, Rai V, Munagala A, Moghul I, Ter F,'\n puts ' Gibbins MA, Moon H, Leonard G, Rumpf W & Wurm Y. 2015.'\n puts ' Sequenceserver: a modern graphical user interface for'\n puts ' custom BLAST databases. biorxiv doi: 10.1101/033142.'\n end",
"def incomplete\r\n\r\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << ''\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def end_of_chapter\n end",
"def final_survey_answers\r\n\tputs \"Here are your final survey answers\"\r\n\tputs client_info\r\nend",
"def output_ris_end\n \"\"\n end",
"def testing_end\n end",
"def section_seperator\n puts <<EOF\n\n\n\n=========================\n=========================\nEOF\nend",
"def endPage()\n @device.endPage() ;\n end",
"def service_prov_name\n Output835.log.info \"Printing NM1*82 for Patient Acc Num : #{eob.patient_account_number}\"\n prov_id, qualifier = service_prov_identification\n service_prov_name_elements = []\n service_prov_name_elements << 'NM1'\n service_prov_name_elements << '82'\n service_prov_name_elements << (eob.rendering_provider_last_name.to_s.strip.blank? ? '2': '1')\n service_prov_name_elements << prov_last_name_or_org\n service_prov_name_elements << eob.rendering_provider_first_name\n service_prov_name_elements << eob.rendering_provider_middle_initial\n service_prov_name_elements << ''\n service_prov_name_elements << eob.rendering_provider_suffix\n service_prov_name_elements << qualifier\n service_prov_name_elements << prov_id\n service_prov_name_elements = Output835.trim_segment(service_prov_name_elements)\n service_prov_name_elements.join(@element_seperator)\n end",
"def section; end",
"def beginprintext(*)\n super\n end",
"def exit_simulation!(line='')\n @packet.send_packet\n if line != ''\n print!(:error, line) #add errormessages\n else\n print!(:log, 'Ausführung beendet!') #add endmessage\n end\n @packet.add_operation('exit')\n connection_store[:is_simulation_done] = true\nend",
"def view_services_header\n clear\n puts\n puts HEADER_LINE\n puts \"View services\".upcase.center(HEADER_LENGTH)\n puts HEADER_LINE\n puts\nend",
"def print_divider\n\t\tputs \"------------------------------------------------------------\"\n\tend",
"def generate_section_end(format, main_section)\n case format\n when 'php'\n \" ],\\n\"\n else\n ''\n end\n end",
"def onEnd\r\n end",
"def usage_end\n '}'\n end",
"def print_end_round\n print_separator_strong\n print_msg('round_complete')\n print_separator_strong\n end",
"def process_end()\n if self.parser.options[:list_activities]\n @sp.activity_map.keys.sort.each { |ak| self.output.printf(\"#-- Activity %s\\n\", ak) }\n end\n\n if self.parser.options[:map_activities]\n @sp.activity_map.keys.sort.each do |ak|\n self.output.printf(\"#-- Activity %s\\n\", ak)\n self.activities_map[ak].each do |url|\n self.output.printf(\"#-- Park %s\\n\", url)\n end\n end\n end\n\n super()\n end",
"def process_interface_additions\n print <<EOF\n procedure :: ci_initialize\n procedure :: ci_set_communicator\n procedure :: ci_delete\n procedure :: ci_create\n procedure :: ci_split\nEOF\nend",
"def line\n puts \"########################################################\"\nend",
"def terminator; end",
"def terminator; end",
"def appt_conf\n puts \"Thank you, your appointment was created!\"\n puts \" \"\n puts \"***************************************************** \"\n puts \" \"\n end",
"def complete_investigation; end",
"def main_display\n puts \"Please choose the action you would like to perform?\" \\\n \"\\n1. Enroll into a department\" \\\n \"\\n2. Change your department\" \\\n \"\\n3. Change your section\" \\\n \"\\n4. View details\"\nend",
"def end_field; end",
"def print_out_line\n\t\t\t#p ['id', id, 'ctd', ctd]\n\t\t\t#p rcp.results.zip(rcp.results.map{|r| send(r)})\n\t\t\tname = @run_name\n\t\t\tname += \" (res: #@restart_id)\" if @restart_id\n\t\t\tname += \" real_id: #@real_id\" if @real_id\n\t\t\tbeginning = sprintf(\"%2d:%d %-60s %1s:%2.1f(%s)\", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s)\n\t\t\tif @status == :Incomplete and @completed_timesteps\n\t\t\t\tbeginning += sprintf(\" %d steps \", @completed_timesteps)\n\t\t\telsif @percent_complete\n \t\t\t\tbeginning+=sprintf(\" %3s%1s \", percent_complete, \"%\")\n\t\t\tend\n\t\t\tif ctd\n\t\t\t\t#beginning += sprintf(\"Q:%f, Pfusion:%f MW, Ti0:%f keV, Te0:%f keV, n0:%f x10^20\", fusionQ, pfus, ti0, te0, ne0)\n\t\t\tend\n\t\t\tbeginning += \" ---#{@comment}\" if @comment\n\t\t\tbeginning\n\t\tend",
"def end_line(kind); end",
"def process_statement_end(tk); end",
"def print_footer\n # TODO: Design footers.\n mvaddstr(@lines - 1, 10, 'sample footer')\n end",
"def print_out_line\n name = @run_name\n name += \" (res: #@restart_id)\" if @restart_id\n name += \" real_id: #@real_id\" if @real_id\n beginning = sprintf(\"%2d:%d %-60s %1s:%2.1f(%s) %3s%1s\", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s, percent_complete, \"%\")\n if ctd\n #beginning += sprintf(\"Q:%f, Pfusion:%f MW, Ti0:%f keV, Te0:%f keV, n0:%f x10^20\", fusionQ, pfus, ti0, te0, ne0)\n end\n beginning += \" ---#{@comment}\" if @comment\n beginning\n end",
"def app_mgmt_demo(screen, button)\n\n $instruct1 = screen.get_named_widget(\"Instruct1\")\n $instruct2 = screen.get_named_widget(\"Instruct2\")\n $instruct3 = screen.get_named_widget(\"Instruct3\")\n $instruct4 = screen.get_named_widget(\"Instruct4\")\n $instruct5 = screen.get_named_widget(\"Instruct5\")\n $instruct6 = screen.get_named_widget(\"Instruct6\")\n $instruct7 = screen.get_named_widget(\"Instruct7\")\n\n if (button == \"INFO\")\n \n display(\"CFS_KIT APP_MGMT_DEMO_INFO_SCREEN\",500,50) \n\n elsif (button == \"NEXT\")\n \n $amd_step += 1\n $amd_demo = 0\n \n if ($amd_step <= AMD_LAST_STEP)\n amd_set_instruct_text($amd_step)\n end\n\n case $amd_step\n when 1\n display(\"CFS_KIT APP_MGMT_SCREEN\",1500,50) \n display(\"SIMSAT CFS_APP_SCREEN\",50,50) \n cmd(\"CFE_EVS ENA_APP_EVENT_TYPE with APP_NAME CFE_ES, BITMASK 0x01\") # Enable debug events\n wait(2) \n cmd(\"CFE_EVS ENA_APP_EVENT_TYPE with APP_NAME CFE_EVS, BITMASK 0x01\") # Enable debug events\n when 2..AMD_LAST_STEP\n # Keep case statement for maintenance\n else\n cmd(\"CFE_EVS DIS_APP_EVENT_TYPE with APP_NAME CFE_EVS, BITMASK 0x01\") # Disable debug events\n wait(2) \n cmd(\"CFE_EVS DIS_APP_EVENT_TYPE with APP_NAME CFE_ES, BITMASK 0x01\") # Disable debug events\n $amd_step = 0\n clear(\"CFS_KIT APP_MGMT_SCREEN\") \n clear(\"SIMSAT CFS_APP_SCREEN\") \n clear(\"CFS_KIT APP_MGMT_DEMO_SCREEN\")\n clear(\"CFS_KIT APP_MGMT_DEMO_INFO_SCREEN\")\n end # Step Case\n \n elsif (button == \"DEMO\")\n \n case $amd_step\n\n # Lookup a symbol\n when 1\n if ($amd_demo == 0)\n Osk::flight.cfe_es.send_cmd(\"SEND_APP_INFO with APP_NAME #{AMD_INFO_APP}\")\n # Don't increment amd_demo; okay if user repeatedly sends lookup cmd\n end\n \n # 2 - Enable/disable app events\n when 2\n case $amd_demo\n when 0 \n Osk::flight.cfe_evs.send_cmd(\"DIS_APP_EVENT_TYPE with APP_NAME #{AMD_INFO_APP}, BITMASK 0x02\") # Disable info events\n $amd_demo += 1\n when 1 \n cmd(\"#{AMD_INFO_APP} NOOP\")\n $amd_demo += 1\n when 2 \n Osk::flight.cfe_evs.send_cmd(\"ENA_APP_EVENT_TYPE with APP_NAME #{AMD_INFO_APP}, BITMASK 0x02\") # Enable info events\n $amd_demo += 1\n when 3\n cmd(\"#{AMD_INFO_APP} NOOP\")\n $amd_demo = 0\n else\n $amd_demo = 0\n end # Case $amd_demo\n\n # 3 - Stop App\n when 3\n case $amd_demo\n when 0\n Osk::flight.cfe_es.send_cmd(\"SEND_APP_INFO with APP_NAME #{AMD_LOAD_APP}\")\n $amd_demo += 1\n when 1\n cmd(\"#{AMD_LOAD_APP} NOOP\")\n $amd_demo += 1\n when 2\n Osk::flight.cfe_es.send_cmd(\"STOP_APP with APP_NAME #{AMD_LOAD_APP}\")\n $amd_demo += 1\n when 3\n cmd(\"#{AMD_LOAD_APP} NOOP\")\n $amd_demo += 1\n end # Case $amd_demo\n\n # 4 - Start App\n when 4\n if ($amd_demo == 0)\n Osk::flight.cfe_es.send_cmd(\"START_APP with APP_NAME #{AMD_LOAD_APP}, \\\n APP_ENTRY_POINT MD_AppMain, \\\n APP_FILENAME #{AMD_LOAD_APP_FILE}, \\\n STACK_SIZE 16384,\\\n EXCEPTION_ACTION 0, \\\n PRIORITY 100\")\n \n $amd_demo += 1\n elsif ($amd_demo == 1)\n Osk::flight.cfe_es.send_cmd(\"SEND_APP_INFO with APP_NAME #{AMD_LOAD_APP}\")\n $amd_demo += 1\n elsif ($amd_demo == 2)\n cmd(\"#{AMD_LOAD_APP} NOOP\")\n # Don't increment $amd_demo, let user continually send noop \n end\n\n end # Step Case\n end # Demo\n \nend",
"def print_out_line\n #p ['id', id, 'ctd', ctd]\n #p rcp.results.zip(rcp.results.map{|r| send(r)})\n name = @run_name\n name += \" (res: #@restart_id)\" if @restart_id\n name += \" real_id: #@real_id\" if @real_id\n beginning = sprintf(\"%2d:%d %-60s %1s:%2.1f(%s) %3s%1s\", @id, @job_no, name, @status.to_s[0,1], @run_time.to_f / 60.0, @nprocs.to_s, percent_complete, \"%\")\n if ctd and fusionQ\n beginning += sprintf(\"Q:%f, Pfusion:%f MW, Ti0:%f keV, Te0:%f keV, n0:%f x10^20\", fusionQ, pfus, ti0, te0, ne0)\n end\n beginning += \" ---#{@comment}\" if @comment\n beginning\n end",
"def end_special_list\n end",
"def print_segments\n\t\t# METHOD: after all of the HL7 content has been parsed, print the contents of each segment in a more easily readible format\n\t\t# output for 1 segment looks like:\n\t\t\t\t\t#~ :: Segment: PID\n\t\t\t\t\t#~ PID-0: Segment => PID\n\t\t\t\t\t#~ PID-1: Set ID - PID => 1\n\t\t\t\t\t#~ PID-2: Patient ID => \n\t\t\t\t\t#~ PID-3: Patient Identifier List => [[\"P00057804\", \"\", \"\", \"\", \"PN\"], [\"4009887514\", \"\", \"\", \"AUSHIC\", \"MC\"], [\"SMIAL001\", \"\", \"\", \"\", \"PI\"]]\n\t\t\t\t\t#~ PID-4: Alternate Patient ID - PID => \n\t\t\t\t\t#~ PID-5: Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\n\t\t\t\t\t#~ PID-6: Mother’s Maiden Name => \n\t\t\t\t\t#~ PID-7: Date/Time of Birth => 19770621\n\t\t\t\t\t#~ PID-8: Sex => M\n\t\t\t\t\t#~ PID-9: Patient Alias => \n\t\t\t\t\t#~ PID-10: Race => \n\t\t\t\t\t#~ PID-11: Patient Address => [\"818 Beach Road\", \"\", \"BEECHMERE\", \"\", \"4510\", \"AU\", \"H\"]\n\n\t\t\t# iterate over each segment\n\t\t\t@parsed_content.each do |segment|\n\t\t\t\tseg = segment[0]\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # eg => \"PID\"\n\t\t\t\t\n\t\t\t\t#get yaml file details\n\t\t\t\tyamlfile = \"hl7specification/#{seg}\"\t\t\t\t\t# for each segment, find the appropriate yaml file (ie one for each segment)\n\t\t\t\tspecs = YAML.load_file(yamlfile)\t\t\t\t\t\t\t# load the yaml file\n\t\t\t\t\n\t\t\t puts \":: #{specs[\"Header\"][\"name\"]} (#{seg})\"\t\t\t# print the text eg \":: Message Header Segment (MSH)\"\n\t\t\t \n\t\t\t # then iterate over each field in the particular segment\n\t\t\t\tsegment.each_with_index do |field, index|\t\t\t\t\t# then for each field...\n\t\t\t\t\tif index > 0 then\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# only if the index is 1 or more (ie the first value is not useful here)\n\t\t\t\t\t\tfld = \"#{seg}-#{index}\"\t\t\t\t\t\t\t\t\t # get the field id => \"PID-5\"\n\t\t\t\t\t\tprint \" #{fld}: \"\t\t\t\t\t\t \t\t\t\t\t\t# on each line print the particular field being queried eg \"PID-5: \"\n\t\t\t\t\t\tfldname = specs[fld][\"name\"]\t\t\t\t\t\t\t\t\t# get the name of the field from the yaml file\n\t\t\t\t\t\tprint \"#{fldname} => \"\t\t\t\t\t\t\t\t\t\t\t\t# print the field name after the field eg \"PID-5: Patient Name\"\n\t\t\t\t\t\tif field.class == String then\t\t\t\t\t\t\t\t\t# if the field class is a string...\n\t\t\t\t\t\t\tputs field\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# then just print (ie add) the value of the string eg \"PID-7: Date/Time of Birth => 19770621\"\n\t\t\t\t\t\telsif field.class == Array then\t\t\t\t\t\t\t\t# otherwise if the field is an array, ie there is lower level structure...\n\t\t\t\t\t\t\tputs field.inspect\t\t\t\t\t\t\t\t\t\t\t\t\t# then print the structure eg \"PID-5 Patient Name => [\"SMITH\", \"Alan\", \"Ross\", \"\", \"Mr\"]\"\n\t\t\t\t\t\tend # << end if field...\n\t\t\t\t\tend # << end if index > 0 \n\t\t\t\tend\t # << end segment.each_with_index\n\t\t\t\tputs\t\n\t\t end\t # << end @parsed_content.each\n\t \n\t end",
"def endLoadingScreen \n \"endLoadingScreen\" \n end",
"def find_page_end\n \n end",
"def printInterface(pageName, concreteCode, ajaxcontrols=nil, css=nil, effects=nil, appData=\"\")\r\n findAjaxControls(ajaxcontrols) unless ajaxcontrols.nil?\r\n findCSS(css) unless css.nil?\r\n findEffects(effects) unless effects.nil?\r\n data = get_events(pageName)\r\n @eventData << \"if (interfaceName == '#{pageName}') { \\n\"\r\n data = \"{\\\"nodes\\\":[#{data}]}\" unless data.nil? or data.blank?\r\n @eventData << \"return '#{data}' \\n\" \r\n @eventData << \"} \\n\"\r\n data = get_decorations(pageName)\r\n @animData << \"if (interfaceName == '#{pageName}') { \\n\"\r\n data = \"{\\\"nodes\\\":[#{data}]}\" unless data.nil? or data.blank?\r\n @animData << \"return '#{data}' \\n\" \r\n @animData << \"} \\n\"\r\n data = get_transitions(pageName)\r\n @transData << \"if (interfaceName == '#{pageName}') { \\n\"\r\n data = \"{\\\"nodes\\\":[#{data}]}\" unless data.nil? or data.blank?\r\n @transData << \"return '#{data}' \\n\" \r\n @transData << \"} \\n\"\r\n begin\r\n # Exceptions raised by this code will\r\n # be caught by the following rescue clause\r\n concreteCode.each { |line|\r\n @bodyContent << line\r\n divId = line.slice(/<div id='\\S+'/)\r\n if (!divId.nil?)\r\n divId.slice!('<div id=\\'')\r\n divId.slice!('\\'')\r\n interfaceToInclude = @references[divId]\r\n if (!interfaceToInclude.nil?)\r\n interfObj = SWUI::Interface.find_by.interface_name(interfaceToInclude).execute.first\r\n\r\n if (interfObj.dynamic==\"true\") then\r\n compiler = AICompiler.new\r\n abstr_spec= SWUI::AbsInterface.preCompile(interfObj.abstract_spec.first,true, appData).first\r\n compiler.parseXML(abstr_spec)\r\n interfObj.concrete_code = compiler.concrete_code\r\n end\r\n \r\n codeToInclude = interfObj.concrete_code.first\r\n ajaxcontrolsToInclude = interfObj.ajaxcontrols.first\r\n cssToInclude = interfObj.concrete_interfaces.first\r\n effectsToInclude= interfObj.effects.first\r\n printInterface(interfaceToInclude, codeToInclude, ajaxcontrolsToInclude, cssToInclude, effectsToInclude, appData)\r\n end\r\n end\r\n }\r\n return true\r\n rescue Exception\r\n @bodyContent << \"Interface #{pageName} not found.\\n\"\r\n @bodyContent << $! \r\n return false\r\n end\r\n end",
"def end(p0) end",
"def segments; end",
"def printing\n \n # \"print\" print data in console without linebrean by default\n print \"HELLO WORLD! \"\n # \"puts\" print data in console with linebreak\n puts \"Hello World!\"\n puts 2+2\n\n#\"end\" finalize the function \nend",
"def display_profile_options\n puts \"1. Back to Main Menu\"\n puts \"2. Remove a Beer from My Interests\"\n puts \"3. Refresh Page\"\n puts \"4. Logout\"\n print_line\nend",
"def diagnostics; end",
"def diagnostics; end",
"def diagnostics; end",
"def diagnostics; end",
"def display_details()\n puts \"Customer id is #{@id} \"\n puts \"Customer name is #{@name} \"\n puts \"Customer address is #{@address} \"\n end",
"def details; end",
"def on_investigation_end; end",
"def on_investigation_end; end",
"def end_document; end",
"def end_document; end",
"def report_stolen\n # binding.pry\n self.vhs.sample.destroy\n puts \"THANK YOU FOR YOUR REPORT. WE WILL LAUNCH AN INVESTIGATION.\"\n end",
"def instruction\n @spliter = '-----------CSAIR-------------------------'\n puts @spliter\n puts 'Welcome to the CSair interface'\n puts 'You can use the following instruction to interact with the databse'\n puts 'Case is not sensitive in this interface'\n puts 'GetCity [CITY CODE] will return the information of the city'\n puts 'GetInfo will return the information of CSAIR'\n puts 'Browser will return the picture which has all the routes of CSAIR'\n puts 'GetAllCity will return a list of cities that CSAIR fly to.'\n puts 'EditCity will bring you to edit mode where you can edit a city'\n puts 'Editroute will bring you to the edit mode where you can edit a route.'\n puts 'Save will save the current graph onto the data file.'\n puts 'Load will reload the graph from the data file. Current progress will be lost.'\n puts 'Merge [file_path] will let you read and merge the file into our current graph'\n puts 'Checkroutes [metro1] [metro2]......[metro.n] will check the routes and give you some feekbacks'\n puts 'Shortest [City Code1] [city Code2] will return the path from city 1 to city 2.'\n puts 'Test and ExitTest will enter or exit the testing mode'\n puts 'Help will print this menu again.'\n puts @spliter\nend",
"def tag_end(var_Data)\n\n Common::Logger.print(Common::VAR_DEBUG, self, \n \"[TAG_END]: given data= [#{var_Data}] for current object name=[\" + @VAR_CURRENT_OBJECT_NAME.to_s() +\n \"], CMPX INDEX=\" + @VAR_COMPLEX_OBJECT_INDEX.to_s())\n \n if(@VAR_CURRENT_OBJECT_NAME == nil)\n Common::Logger.print(Common::VAR_DEBUG, self, \n \"[WARNING], closing not existing tag!\" + @VAR_COMPLEX_OBJECTS_NAMES[@VAR_COMPLEX_OBJECT_INDEX-1] + \n \", \" + @VAR_COMPLEX_OBJECTS[@VAR_COMPLEX_OBJECT_INDEX-1].to_s())\n \n storeComplexObject(@VAR_COMPLEX_OBJECTS[@VAR_COMPLEX_OBJECT_INDEX-1])\n else\n \n # If the copmlex object index is equal 0, it means that\n # there wasn't any complex object detected yet and the\n # current object is a simple object.\n if(@VAR_COMPLEX_OBJECT_INDEX == 0)\n Common::Logger.print(Common::VAR_DEBUG, self, \"[TAG_END]: storing into SBAStore\")\n \n storeObject(@VAR_CURRENT_OBJECT)\n \n @VAR_CURRENT_OBJECT_NAME = nil\n @VAR_CURRENT_OBJECT = nil\n else\n Common::Logger.print(Common::VAR_DEBUG, self, \"[TAG_END]: storing into array [\" + @VAR_CURRENT_OBJECT.to_s() + \"]\")\n \n storeObject(@VAR_CURRENT_OBJECT)\n \n @VAR_CURRENT_OBJECT_NAME = nil\n @VAR_CURRENT_OBJECT = nil\n end\n end\n end",
"def beginprint(*)\n super\n end",
"def endnote\n Rails.logger.info(\"es287_debug #{__FILE__}:#{__LINE__} params = #{params.inspect}\")\n if params[:id].nil?\n bookmarks = token_or_current_or_guest_user.bookmarks\n bookmark_ids = bookmarks.collect { |b| b.document_id.to_s }\n Rails.logger.debug(\"es287_debug #{__FILE__}:#{__LINE__} bookmark_ids = #{bookmark_ids.inspect}\")\n Rails.logger.debug(\"es287_debug #{__FILE__}:#{__LINE__} bookmark_ids size = #{bookmark_ids.size.inspect}\")\n if bookmark_ids.size > BookBagsController::MAX_BOOKBAGS_COUNT\n bookmark_ids = bookmark_ids[0..BookBagsController::MAX_BOOKBAGS_COUNT] \n end\n @response, @documents = search_service.fetch(bookmark_ids, :per_page => 1000,:rows => 1000)\n Rails.logger.debug(\"es287_debug #{__FILE__}:#{__LINE__} @documents = #{@documents.size.inspect}\")\n else\n @response, @documents = search_service.fetch(params[:id])\n end\n fmt = params[:format]\n Rails.logger.debug(\"es287_debug #{__FILE__}:#{__LINE__} #{__method__} = #{fmt}\")\n respond_to do |format|\n format.endnote_xml { render \"show.endnote_xml\" ,layout: false } \n format.endnote { render :layout => false } #wrapped render :layout => false in {} to allow for multiple items jac244\n format.ris { render 'ris', :layout => false }\n end\n end",
"def print_footer\n if @students.count == 1\n puts \"Overall we have #{@students.count} great student.\".center(40)\n else\n puts \"Overall we have #{@students.count} great students.\".center(40)\n end\n puts # spacer line\nend",
"def footer\n end",
"def hline_top\n\tputs \"__________________________________________________________\"\nend",
"def print_footer\n @students.count==1 ? final_word = \"student\" : final_word = \"students\"\n puts \"Overall, we have #{@students.count} great #{final_word}\".center(@center_by)\nend",
"def print_data\n\n # Print customer information.\n box_height = 0.166667\n y = 8.9\n self.vms_text_box(\"Attn: <strong>PIERRE PAROZ</strong>\", 0.5, y, 3.5, box_height, 10, :normal, :left, :center)\n self.vms_text_box(\"AMEMIC\", 4.5, y, 3.5, box_height, 10, :bold, :left, :center)\n self.vms_text_box(\"Date: <strong>12/13/18</strong>\", 4.5, y, 3.5, box_height, 10, :normal, :right, :center)\n y -= box_height\n self.vms_text_box(\"AMERICAN MICRO PRODUCTS, INC.\", 0.5, y, 3.5, box_height, 10, :bold, :left, :center)\n y -= box_height\n self.vms_text_box(\"4288 ARMSTRONG BLVD\", 0.5, y, 3.5, box_height, 10, :bold, :left, :center)\n self.vms_text_box(\"Phone:\", 4.5, y, 0.75, box_height, 10, :normal, :left, :center)\n self.vms_text_box(\"(513) 732-2674\", 5.15, y, 3.5, box_height, 10, :bold, :left, :center)\n y -= box_height\n self.vms_text_box(\"BATAVIA, OH 45103-1600\", 0.5, y, 3.5, box_height, 10, :bold, :left, :center)\n self.vms_text_box(\"Fax:\", 4.5, y, 0.75, box_height, 10, :normal, :left, :center)\n self.vms_text_box(\"(513) 732-3535\", 5.15, y, 1.5, box_height, 10, :bold, :left, :center)\n self.vms_text_box(\"Ext\", 6.4, y, 0.5, box_height, 10, :normal, :left, :center)\n self.vms_text_box(\"032\", 6.7, y, 1, box_height, 10, :bold, :left, :center)\n y -= (box_height + 0.25)\n\n # Draw quotation box.\n fill_color('000000')\n fill_rectangle([0.25.in, y.in], 8.in, 0.4.in)\n stroke_rectangle([0.25.in, y.in], 8.in, 0.4.in)\n self.vms_text_box(\"VMS Quote # <strong>71295</strong>\", 0.35, y, 3.8, 0.4, 12, :normal, :left, :center, nil, 'ffffff')\n self.vms_text_box(\"Your Request # <strong>Q02I7446</strong>\", 4.35, y, 3.8, 0.4, 10, :normal, :right, :center, nil, 'ffffff')\n y -= 0.4\n fill_color('cccccc')\n fill_rectangle([0.25.in, y.in], 8.in, 0.25.in)\n stroke_rectangle([0.25.in, y.in], 2.in, 0.25.in)\n stroke_rectangle([2.25.in, y.in], 4.5.in, 0.25.in)\n stroke_rectangle([6.75.in, y.in], 1.5.in, 0.25.in)\n self.vms_text_box(\"Part Number\", 0.25, y, 2, 0.25, 10, :bold, :center, :center)\n self.vms_text_box(\"Part Description & Process Specification\", 2.25, y, 4.5, 0.25, 10, :bold, :center, :center)\n self.vms_text_box(\"EAU\", 6.75, y, 1.5, 0.25, 10, :bold, :center, :center)\n quotation_lines = 9\n y -= 0.25\n stroke_rectangle([0.25.in, y.in], 2.in, (quotation_lines * _p(10)).in)\n stroke_rectangle([2.25.in, y.in], 4.5.in, (quotation_lines * _p(10)).in)\n stroke_rectangle([6.75.in, y.in], 1.5.in, (quotation_lines * _p(10)).in)\n self.vms_text_box(\"627591\\n \\n \\n \\n \\n \\n \", 0.35, y, 1.8, quotation_lines * _p(10), 10, :normal, :left, :center)\n self.vms_text_box(\"POLE PIECE\\n\\n12L14 STEEL X 12.3MM OD X 7MM ID\\n& 1.90MM THRU HOLE X 13.9MM LONG\\n\\nZINC-NICKEL (.0003\\\" MINIMUM) &\\nCLEAR TRIVALENT CHROMATE\", 2.35, y, 4.3, quotation_lines * _p(10), 10, :normal, :left, :center)\n self.vms_text_box(\"500,000 PCS\\n \\n \\n \\n \\n \\n \", 6.85, y, 1.3, quotation_lines * _p(10), 10, :normal, :center, :center)\n y -= quotation_lines * _p(10)\n remarks_lines = 4\n stroke_rectangle([0.25.in, y.in], 8.in, (remarks_lines * _p(10)).in)\n self.vms_text_box(\"<em>Remarks:</em>\\nPLEASE NOTE: FORD WSA-M1P87-A1 IS OBSOLETE.\\nUSING FORD WSS-M1P87-B1 FOR PURPOSE OF THIS QUOTATION.\", 0.35, y, 7.8, remarks_lines * _p(10), 10, :normal, :left, :center)\n y -= remarks_lines * _p(10)\n fill_color('000000')\n fill_rectangle([0.25.in, y.in], 8.in, 0.32.in)\n stroke_rectangle([0.25.in, y.in], 8.in, 0.32.in)\n self.vms_text_box(\"Price: <strong>$0.028/each</strong>\", 0.35, y, 3.8, 0.32, 10, 
:normal, :left, :center, nil, 'ffffff')\n self.vms_text_box(\"Minimum Lot Charge: <strong>$250.00</strong>\", 4.35, y, 3.8, 0.32, 10, :normal, :right, :center, nil, 'ffffff')\n y -= 0.57\n\n fill_color('000000')\n fill_rectangle([0.25.in, y.in], 8.in, 0.4.in)\n stroke_rectangle([0.25.in, y.in], 8.in, 0.4.in)\n self.vms_text_box(\"VMS Quote # <strong>71296</strong>\", 0.35, y, 3.8, 0.4, 12, :normal, :left, :center, nil, 'ffffff')\n self.vms_text_box(\"Your Request # <strong>Q02I7447</strong>\", 4.35, y, 3.8, 0.4, 10, :normal, :right, :center, nil, 'ffffff')\n y -= 0.4\n fill_color('cccccc')\n fill_rectangle([0.25.in, y.in], 8.in, 0.25.in)\n stroke_rectangle([0.25.in, y.in], 2.in, 0.25.in)\n stroke_rectangle([2.25.in, y.in], 4.5.in, 0.25.in)\n stroke_rectangle([6.75.in, y.in], 1.5.in, 0.25.in)\n self.vms_text_box(\"Part Number\", 0.25, y, 2, 0.25, 10, :bold, :center, :center)\n self.vms_text_box(\"Part Description & Process Specification\", 2.25, y, 4.5, 0.25, 10, :bold, :center, :center)\n self.vms_text_box(\"EAU\", 6.75, y, 1.5, 0.25, 10, :bold, :center, :center)\n quotation_lines = 9\n y -= 0.25\n stroke_rectangle([0.25.in, y.in], 2.in, (quotation_lines * _p(10)).in)\n stroke_rectangle([2.25.in, y.in], 4.5.in, (quotation_lines * _p(10)).in)\n stroke_rectangle([6.75.in, y.in], 1.5.in, (quotation_lines * _p(10)).in)\n self.vms_text_box(\"627599\\n \\n \\n \\n \\n \\n \", 0.35, y, 1.8, quotation_lines * _p(10), 10, :normal, :left, :center)\n self.vms_text_box(\"ARMATURE\\n\\n12L14 STEEL X 12.3MM & 10.3MM &\\n4.5MM & 1.90MM OD X 50.05MM LONG\\n\\nZINC-NICKEL (.0003\\\" MINIMUM) &\\nCLEAR TRIVALENT CHROMATE\", 2.35, y, 4.3, quotation_lines * _p(10), 10, :normal, :left, :center)\n self.vms_text_box(\"500,000 PCS\\n \\n \\n \\n \\n \\n \", 6.85, y, 1.3, quotation_lines * _p(10), 10, :normal, :center, :center)\n y -= quotation_lines * _p(10)\n remarks_lines = 4\n stroke_rectangle([0.25.in, y.in], 8.in, (remarks_lines * _p(10)).in)\n self.vms_text_box(\"<em>Remarks:</em>\\nPLEASE NOTE: FORD WSA-M1P87-A1 IS OBSOLETE.\\nUSING FORD WSS-M1P87-B1 FOR PURPOSE OF THIS QUOTATION.\", 0.35, y, 7.8, remarks_lines * _p(10), 10, :normal, :left, :center)\n y -= remarks_lines * _p(10)\n fill_color('000000')\n fill_rectangle([0.25.in, y.in], 8.in, 0.32.in)\n stroke_rectangle([0.25.in, y.in], 8.in, 0.32.in)\n self.vms_text_box(\"Price: <strong>$0.091/each</strong>\", 0.35, y, 3.8, 0.32, 10, :normal, :left, :center, nil, 'ffffff')\n self.vms_text_box(\"Minimum Lot Charge: <strong>$250.00</strong>\", 4.35, y, 3.8, 0.32, 10, :normal, :right, :center, nil, 'ffffff')\n y -= 0.57\n\n return\n\n # Print data table.\n self.vms_text_box(\"LN\", 0.25, 7.95, 0.5, 8 * _p(10), 10, :normal, :center, :top)\n self.vms_text_box(\"627591\\nQuote # <strong>71295</strong>\", 0.8, 7.95, 1.9, 8 * _p(10), 10, :normal, :left, :top)\n self.vms_text_box(\"POLE PIECE\\n\\n12L14 STEEL X 12.3MM OD X 7MM ID\\n& 1.90MM THRU HOLE X 13.9MM LONG\\n\\nZINC-NICKEL (.0003\\\" MINIMUM) &\\nCLEAR TRIVALENT CHROMATE\", 2.8, 7.95, 3.15, 8 * _p(10), 10, :normal, :left, :top)\n self.vms_text_box(\"500,000 PCS\", 6.05, 7.95, 1.15, 8 * _p(10), 10, :normal, :center, :top)\n self.vms_text_box(\".028\\n$/EACH\\n\\n<em>MINIMUM</em>:\\n$250.00\", 7.3, 7.95, 0.9, 8 * _p(10), 10, :normal, :center, :top)\n fill_color('eeeeee')\n fill_rectangle([0.5.in, 6.75.in], 7.5.in, (8 * _p(10)).in)\n stroke_rectangle([0.5.in, 6.75.in], 7.5.in, (8 * _p(10)).in)\n self.vms_text_box(\"PLEASE NOTE: FORD WSA-M1P87-A1 IS OBSOLETE.\\nUSING FORD WSS-M1P87-B1 FOR PURPOSE OF THIS 
QUOTATION.\\n\\nYOUR REQUEST NUMBER <strong>Q0217446</strong>\\n\\nPLEASE REFER TO OUR QUOTATION NUMBER <strong>71295</strong> ON ALL CORRESPONDENCE OR ORDERS\", 0.5, 6.75, 7.5, 8 * _p(10), 10, :normal, :center, :center)\n self.vms_text_box(\"LN\", 0.25, 5.5, 0.5, 8 * _p(10), 10, :normal, :center, :top)\n self.vms_text_box(\"627599\\nQuote # <strong>71296</strong>\", 0.8, 5.5, 1.9, 8 * _p(10), 10, :normal, :left, :top)\n self.vms_text_box(\"ARMATURE\\n\\n12L14 STEEL X 12.3MM & 10.3MM &\\n4.5MM & 1.9MM OD X 50.05MM LONG\\n\\nZINC-NICKEL (.0003\\\" MINIMUM) &\\nCLEAR TRIVALENT CHROMATE\", 2.8, 5.5, 3.15, 8 * _p(10), 10, :normal, :left, :top)\n self.vms_text_box(\"500,000 PCS\", 6.05, 5.5, 1.15, 8 * _p(10), 10, :normal, :center, :top)\n self.vms_text_box(\".091\\n$/EACH\\n\\n<em>MINIMUM</em>:\\n$250.00\", 7.3, 5.5, 0.9, 8 * _p(10), 10, :normal, :center, :top)\n fill_color('eeeeee')\n fill_rectangle([0.5.in, 4.3.in], 7.5.in, (8 * _p(10)).in)\n stroke_rectangle([0.5.in, 4.3.in], 7.5.in, (8 * _p(10)).in)\n self.vms_text_box(\"PLEASE NOTE: FORD WSA-M1P87-A1 IS OBSOLETE.\\nUSING FORD WSS-M1P87-B1 FOR PURPOSE OF THIS QUOTATION.\\n\\nYOUR REQUEST NUMBER <strong>Q0217447</strong>\\n\\nPLEASE REFER TO OUR QUOTATION NUMBER <strong>71296</strong> ON ALL CORRESPONDENCE OR ORDERS\", 0.5, 4.3, 7.5, 8 * _p(10), 10, :normal, :center, :center)\n\n\n \n end",
"def endnote\n @response, @documents = get_solr_response_for_field_values(SolrDocument.unique_key,params[:id])\n respond_to do |format|\n format.endnote { render :layout => false } #wrapped render :layout => false in {} to allow for multiple items jac244\n format.mendeley { render :layout => false } \n end\n end",
"def display_intro\n # Display Company Logo\n display_logo\n # Display Emergency Information\n display_emergency_info\n end",
"def print0\n end",
"def add_end_marker_here(marker='--')\n register_a_parm End.new(marker, howmany: :NONE, debug: @debug)\n end",
"def recorder_mgmt_demo(screen, button)\r\n\r\n $instruct1 = screen.get_named_widget(\"Instruct1\")\r\n $instruct2 = screen.get_named_widget(\"Instruct2\")\r\n $instruct3 = screen.get_named_widget(\"Instruct3\")\r\n $instruct4 = screen.get_named_widget(\"Instruct4\")\r\n $instruct5 = screen.get_named_widget(\"Instruct5\")\r\n $instruct6 = screen.get_named_widget(\"Instruct6\")\r\n $instruct7 = screen.get_named_widget(\"Instruct7\")\r\n\r\n if (button == \"INFO\")\r\n \r\n display(\"CFS_KIT RECORDER_MGMT_DEMO_INFO_SCREEN\",500,50) \r\n\r\n elsif (button == \"NEXT\")\r\n \r\n $rmd_step += 1\r\n $rmd_demo = 0\r\n \r\n if ($rmd_step <= RMD_LAST_STEP)\r\n rmd_set_instruct_text($rmd_step)\r\n end\r\n\r\n case $rmd_step\r\n when 1\r\n display(\"CFS_KIT RECORDER_MGMT_SCREEN\",500,50) \r\n cmd(\"CFE_EVS ENA_APP_EVENT_TYPE with APPNAME DS, BITMASK 0x01\") # Enable debug events\r\n when 2..RMD_LAST_STEP\r\n # Keep case statement for maintenance\r\n else\r\n cmd(\"CFE_EVS DIS_APP_EVENT_TYPE with APPNAME DS, BITMASK 0x01\") # Disable debug events\r\n $rmd_step = 0\r\n clear(\"CFS_KIT RECORDER_MGMT_SCREEN\") \r\n clear(\"CFS_KIT RECORDER_MGMT_DEMO_SCREEN\")\r\n clear(\"CFS_KIT RECORDER_MGMT_DEMO_INFO_SCREEN\")\r\n end # Step Case\r\n \r\n elsif (button == \"DEMO\")\r\n \r\n case $rmd_step\r\n\r\n # 1- \r\n when 1\r\n if ($rmd_demo == 0)\r\n # Don't increment rmd_demo; okay if user repeats cmd\r\n end\r\n \r\n # 2 - \r\n when 2\r\n case $rmd_demo\r\n when 0 \r\n $rmd_demo += 1\r\n when 1 \r\n $rmd_demo += 1\r\n when 2 \r\n $rmd_demo += 1\r\n when 3\r\n $rmd_demo += 1\r\n else\r\n $rmd_demo = 0\r\n end # Case $rmd_demo\r\n\r\n # 3 - \r\n when 3\r\n if ($rmd_demo == 0)\r\n $rmd_demo += 1\r\n elsif ($rmd_demo == 1)\r\n $rmd_demo += 1\r\n elsif ($rmd_demo == 2)\r\n # Don't increment rmd_demo; okay if user repeats cmd\r\n end\r\n\r\n # 4 - \r\n when 4\r\n if ($rmd_demo == 0)\r\n $rmd_demo += 1\r\n elsif ($rmd_demo == 1)\r\n # Don't increment rmd_demo; okay to repeat last command\r\n end\r\n\r\n end # Step Case\r\n end # Demo\r\n \r\nend",
"def print_banner\n print_line BANNER\n print_line\n print_line\n end",
"def header\n puts \"Knock-Out BlackJack Card Counting trainer v0.1\"\n puts \"\"\nend",
"def print_start_header()\n puts \">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\"\nend",
"def print_generate_invoices_info\n puts \"\\n==============================2.TEST generateInvoices====================================\"\n end",
"def end\n\t\ttexts = get_texts(\"end\")\n\t\t@title = texts.title.html_safe\n\t\t@heading = texts.heading.html_safe\n\t\t@text = texts.body.html_safe\n\t\n\t\trespond_to do |format|\n\t\t\tformat.html\n\t\t\t# format.xml { render :xml => @foobar } #.to_xml }\n\t\t\t# format.json { render :json => @foobar } #.to_json }\n\t\tend\n\tend",
"def show_smazzata_end(best_pl_points )\r\n @log.debug \"Show smazzata end dialogbox\"\r\n str = \"** Segno terminato: vince #{best_pl_points.first[0]} col punteggio #{best_pl_points.first[1]} a #{best_pl_points[1][1]}\\n\"\r\n log str\r\n \r\n if @option_gfx[:use_dlg_on_core_info]\r\n @msgbox_smazzataend.show_message_box(\"Smazzata finita\", str.gsub(\"** \", \"\"), true)\r\n @msgbox_smazzataend.set_visible(true)\r\n end\r\n \r\n end",
"def end_tag\n \"#TESTLAB-END-#{self.bridge.to_s.upcase}\"\n end"
] | [
"0.6391423",
"0.63385576",
"0.60705686",
"0.60705686",
"0.60705686",
"0.5980476",
"0.5980476",
"0.5969855",
"0.5968507",
"0.5950886",
"0.57932425",
"0.57872635",
"0.5742891",
"0.5705",
"0.5704522",
"0.5666644",
"0.56256664",
"0.56256664",
"0.56157744",
"0.5559606",
"0.54871166",
"0.5461851",
"0.5437749",
"0.542558",
"0.5365379",
"0.5355759",
"0.5355221",
"0.53511316",
"0.53472936",
"0.53450394",
"0.534477",
"0.5337944",
"0.5322542",
"0.53113353",
"0.53094906",
"0.529521",
"0.5278632",
"0.52732486",
"0.5267671",
"0.52443695",
"0.52422565",
"0.5239423",
"0.523666",
"0.5221949",
"0.52141297",
"0.52105105",
"0.5207444",
"0.5194014",
"0.5194014",
"0.51856124",
"0.5184356",
"0.5184106",
"0.5173168",
"0.5170734",
"0.5170376",
"0.5168371",
"0.51664984",
"0.5162665",
"0.5156855",
"0.5154993",
"0.51542914",
"0.51517874",
"0.5150368",
"0.5135622",
"0.51343626",
"0.51331645",
"0.51331186",
"0.5127829",
"0.51217544",
"0.51179767",
"0.51179767",
"0.51179767",
"0.51179767",
"0.51137424",
"0.51116264",
"0.5107167",
"0.5107167",
"0.5104538",
"0.5104538",
"0.50883394",
"0.5078557",
"0.50762224",
"0.5074095",
"0.50685644",
"0.5059939",
"0.50571734",
"0.5056029",
"0.5054049",
"0.5052185",
"0.50502634",
"0.5049559",
"0.5048755",
"0.5047921",
"0.50470954",
"0.50419134",
"0.50405544",
"0.50394475",
"0.50353897",
"0.50273776",
"0.5026023",
"0.50256413"
] | 0.0 | -1 |
Configure the pagination header manually | def set_pagination_header(name, options = {})
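# Builds an HTTP Link header with first/prev/next/last relations for the paginated scope stored in @<name>.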
scope = instance_variable_get("@#{name}")
request_params = request.query_parameters
url_without_params = request.original_url.slice(0..(request.original_url.index("?")-1)) unless request_params.empty?
url_without_params ||= request.original_url
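# Map each applicable link relation (first/last/next/prev) to its page number.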
page = {}
page[:first] = 1 if scope.total_pages > 1 && !scope.first_page?
page[:last] = scope.total_pages if scope.total_pages > 1 && !scope.last_page?
page[:next] = scope.current_page + 1 unless scope.last_page?
page[:prev] = scope.current_page - 1 unless scope.first_page?
pagination_links = []
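# Format each relation as '<url>; rel="name"' and join them into a single Link header value.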
page.each do |k, v|
new_request_hash = request_params.merge({ page: v })
pagination_links << "<#{url_without_params}?#{new_request_hash.to_param}>; rel=\"#{k}\""
end
headers['Link'] = pagination_links.join(', ')
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_pagination_header(resource,resource_name)\n #print current page\n headers[\"x-page\"] = page\n #print records per page\n headers[\"x-per-page\"] = per_page\n #print total records\n headers[\"x-total\"] = resource.total_count\n #print next page url\n headers[\"next_page\"] = eval \"api_v1_#{resource_name}_url(request.query_parameters.merge(page: resource.next_page))\" if resource.next_page\n #print prev page url\n headers[\"prev_page\"] = eval \"api_v1_#{resource_name}_url(request.query_parameters.merge(page: resource.next_page))\" if resource.prev_page\n end",
"def set_pagination_headers\n headers[\"Access-Control-Expose-Headers\"] = \"Link, Page, Per-Page\"\n headers[\"Page\"] = current_page\n headers[\"Per-Page\"] = current_per_page\n end",
"def pagination_headers\n self.class.const_get(:PAGINATION_HEADERS)\n end",
"def pagination_headers(collection)\n links = (headers['Link'] || \"\").split(',').map(&:strip)\n clean_url = request.original_url.sub(/\\?.*$/, '')\n\n paging_info = pages(collection)\n\n paging_info.each do |key, value|\n query_params = request.query_parameters.merge(page: value)\n links << %Q( <#{clean_url}?#{query_params.to_param}>; rel=\"#{key}\" )\n end\n\n headers['Link'] = links.join(\", \") unless links.empty?\n headers[PagerApi.total_pages_header] = collection.total_pages\n headers[PagerApi.total_count_header] = collection.total_count\n\n return nil\n end",
"def paginator_navigation\n paginator_config = {\"borders\"=>5, \"per_page\"=>10}.merge(config[\"paginator\"] || {})\n page_count = all.length\n total_pages = (page_count.to_f/paginator_config[\"per_page\"]).ceil\n current_page = master.page_data['current_page'].to_i\n current_page = current_page.zero? ? 1 : current_page\n\n left_dots = ((current_page+1)/2).ceil\n right_dots = ((total_pages+current_page)/2).ceil\n borders = paginator_config[\"borders\"]\n\n pages = total_pages.times.select { |i|\n i+1 <= borders || i+1 > total_pages-borders ||\n (i+1 >= current_page-(borders/2).ceil && i+1 <= current_page+(borders/2).ceil) ||\n i+1 == left_dots || i+1 == right_dots\n }.map { |i|\n url = i.zero? && paginator_config[\"root_page\"] ?\n paginator_config[\"root_page\"] : \"#{paginator_config[\"url\"]}/#{i+1}\"\n name = (i+1 > borders) && (i+1 < total_pages-borders) &&\n ((i+1 < current_page-(borders/2).ceil) || (i+1 > current_page+(borders/2).ceil)) &&\n (i+1 == left_dots || i+1 == right_dots) ? '…' : \"#{i+1}\"\n\n {\n \"url\" => ruhoh.to_url(url),\n \"name\" => name,\n \"is_active_page\" => (i+1 == current_page),\n \"title\" => \"#{i+1}\"\n }\n }\n pages\n end",
"def pagination(items_count, default_per_page: 20,\n maxium_per_page: 100,\n set_header: true)\n items_count = items_count.count if items_count.respond_to? :count\n\n @pagination_per_page = (params[:per_page] || default_per_page).to_i\n @pagination_per_page = maxium_per_page if @pagination_per_page > maxium_per_page\n @pagination_per_page = 1 if @pagination_per_page < 1\n\n items_count = 0 if items_count < 0\n pages_count = (items_count.to_f / @pagination_per_page).ceil\n pages_count = 1 if pages_count < 1\n\n @pagination_items_count = items_count\n @pagination_pages_count = pages_count\n\n @pagination_page = (params[:page] || 1).to_i\n @pagination_page = pages_count if @pagination_page > pages_count\n @pagination_page = 1 if @pagination_page < 1\n\n if current_page > 1\n @pagination_first_page_url = add_or_replace_uri_param(request.url, :page, 1)\n @pagination_prev_page_url = add_or_replace_uri_param(request.url, :page, (current_page > pages_count ? pages_count : current_page - 1))\n end\n\n if current_page < pages_count\n @pagination_next_page_url = add_or_replace_uri_param(request.url, :page, current_page + 1)\n @pagination_last_page_url = add_or_replace_uri_param(request.url, :page, pages_count)\n end\n\n if set_header\n link_headers ||= []\n\n if current_page > 1\n link_headers << \"<#{@pagination_first_page_url}>; rel=\\\"first\\\"\" if @pagination_first_page_url\n link_headers << \"<#{@pagination_prev_page_url}>; rel=\\\"prev\\\"\" if @pagination_prev_page_url\n end\n\n if current_page < pages_count\n link_headers << \"<#{@pagination_next_page_url}>; rel=\\\"next\\\"\" if @pagination_next_page_url\n link_headers << \"<#{@pagination_last_page_url}>; rel=\\\"last\\\"\" if @pagination_last_page_url\n end\n\n link_header = link_headers.join(', ')\n\n if self.respond_to?(:header)\n self.header('Link', link_header)\n self.header('X-Items-Count', items_count.to_s)\n self.header('X-Pages-Count', pages_count.to_s)\n end\n\n if defined?(response) && response.respond_to?(:headers)\n response.headers['Link'] = link_header\n response.headers['X-Items-Count'] = items_count.to_s\n response.headers['X-Pages-Count'] = pages_count.to_s\n end\n end\n end",
"def Header\r\n\t\tif @page == 1\r\n\t\t\tfirst_page_header\r\n\t\telse\r\n\t\t\tpage_header\t\r\n\t\tend\r\n\tend",
"def pagination=(count)\n settings.pagination = count\n end",
"def setup_pager!\n page\n end",
"def pagination_param\n Kaminari.config.param_name\n end",
"def pagination_param\n Kaminari.config.param_name\n end",
"def add_page_header\n tenant = MnoEnterprise::Tenant.show\n title = Settings.dashboard.payment.enabled ? \"#{t(:monthly_invoice)} - \" : \"#{t(:account_statement)} - \"\n @pdf.repeat :all do\n @pdf.bounding_box([0, @pdf.bounds.top+@format[:header_size]], width: 540, height: @format[:footer_size]) do\n @pdf.float do\n @pdf.image main_logo_white_bg_path(true), fit: [135, (@format[:footer_size])]\n end\n @pdf.text tenant.name, align: :right, inline_format: true\n if tenant.main_address.present?\n tenant_address = \"#{tenant.main_address.dig('street')}\\n#{tenant.main_address.dig('city')}\\n#{ISO3166::Country.new(tenant.main_address.dig('country_code')).name}\"\n @pdf.move_down 5\n @pdf.text \"<color rgb='999999'>#{tenant_address}</color>\", align: :right, inline_format: true, style: :italic, size: 9\n end\n if contact_details = Settings.dashboard&.organization_management&.billing&.invoice_contact_details.presence\n @pdf.move_down 5\n @pdf.font_size(10) { @pdf.text contact_details, align: :right }\n end\n @pdf.move_down 10\n @pdf.font_size(20) { @pdf.text \"#{title} #{@data[:period_month]}\", style: :bold, align: :right }\n end\n end\n end",
"def pagination_setup(paginator: ManifestItem::Paginator, **opt)\n # noinspection RubyMismatchedReturnType\n super\n end",
"def paginator; end",
"def pagination_info\n <<-HTML.strip_heredoc\n <div class='table-me-pagination-info'>\n <h3>#{options[:name].split('_').join(' ').titleize}</h3> <p><b>#{options[:page]}</b> of <b>#{options[:page_total]}</b> out of a total <b>#{options[:total_count]}</b></p>\n </div>\n HTML\n end",
"def pagination\n [ :next_page ]\n end",
"def pages_head(current_page)\n head = [1, 2, 3]\n head += [4, 5, 6] if current_page <= 2\n head\n end",
"def add_page_numbering\n numbering_options = {\n at: [@pdf.bounds.right - 150, 0-@format[:footer_size]],\n width: 150,\n align: :right,\n start_count_at: 1,\n color: \"999999\",\n size: 8\n }\n @pdf.number_pages t('paging'), numbering_options\n end",
"def paginator=(_arg0); end",
"def pagination\n settings.pagination || 50\n end",
"def pagination_options\n { :offset => offset, :limit => per_page } if paginated?\n end",
"def per_page; end",
"def add_pagination_and_sorting( query )\n\n query.paginate( page: params[:page] ).order( \"#{sort_column} #{( sort_order == 'a' ) ? 'ASC' : 'DESC' }\" )\n\n end",
"def index\n @header_templates = HeaderTemplate.page params[:page]\n end",
"def pagination_setup(paginator: User::Paginator, **opt)\n # noinspection RubyMismatchedReturnType\n super\n end",
"def index\n @tconfigurations = Tconfiguration.all.sort{|x,y| x.Name<=>y.Name}\n #pagination\n @parametrs=params\n if params[:pagenum]\n @pagenum=params[:pagenum].to_i\n else\n @pagenum=1\n end\n @perpage=30\n @pagecount=@tconfigurations.length/@perpage\n @pagecount+=1 if @tconfigurations.length%@perpage!=0\n @firstline=(@pagenum-1)*@perpage\n @tconfigurations=@tconfigurations.drop(@firstline).take(@perpage)\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @tconfigurations }\n end\n end",
"def set_page_vars\n @page = params[:page] ? params[:page].to_i : START_PAGE\n @per_page = PER_PAGE\n end",
"def pagination_title(set)\n return unless set.respond_to?('total_pages')\n \"page #{set.current_page} of #{set.total_pages}\" if set.total_pages > 1\n end",
"def pagination_method\n Kaminari.config.page_method_name\n end",
"def pagination_method\n Kaminari.config.page_method_name\n end",
"def initialize\n @page = 1\n end",
"def pagination_page_param\n WillPaginate::ViewHelpers.pagination_options[:param_name]\n end",
"def paginatable?; paginatable; end",
"def page_header(site_config, page_count)\n # start common page region\n page = %(<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"UTF-8\">\n <meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n <!-- The above 3 meta tags *must* come first in the head;\n any other head content must come *after* these tags -->\n <title>#{site_config['title']}</title>)\n page += add_icons\n page += %(\n <meta name=\"description\" content=\"#{site_config['description']}\">\n <meta name=\"theme-color\" content=\"##{site_config['theme_color']}\">\n <link rel=\"stylesheet\" href=\"assets/bootstrap/css/bootstrap.min.css\">\n <link rel=\"stylesheet\" href=\"assets/bootstrap/css/bootstrap-theme.min.css\">\n <style>\n .container-fluid { padding: 0px; }\n .navbar, .navbar-default { margin-bottom: 0; padding: 5pt; background-color: ##{site_config['theme_color']}; font-size: 12pt; }\n .navbar, .navbar-default li a { color: ##{site_config['text_color']} !important; }\n .navbar-default .navbar-brand { margin-left: 20px !important; color: ##{site_config['logo_text_color']}; font-size: 18pt; font-weight: bold; }\n .navbar-brand:hover { background-color: #{site_config['nav_hover_color']} !important; }\n div[id^=\"d3pie_chart_div_\"], canvas { margin-bottom: 100px; }\n footer { background-color: ##{site_config['theme_color']}; min-height: 200px;}\n footer ul a { color: ##{site_config['text_color']} !important; font-size: 13pt; }\n footer .container { margin-left: 15px; }\n .built { text-decoration: none !important; }\n .selected { background-color: #{site_config['nav_selected_color']}; font-weight: bold; }\n .navbar-default li:hover a { background-color: #{site_config['nav_hover_color']} !important; }\n h1 { text-align: center; background-color: ##{site_config['theme_color']}; padding: 14px; color: ##{site_config['text_color']}; }\n pre { white-space: pre-wrap; word-wrap: break-word; }\n .homepage { padding: 5px 30px 5px 30px; }\n .logo { float: left; }\n .oll { padding-left: 1em; }\n h2#other { text-align: center; }\n .plotlypie { height: 625px; }\n </style>\n </head>\n <body>\n <!-- Static navbar -->\n <nav class=\"navbar navbar-default\" id=\"head1\">\n <div class=\"container-fluid\">\n <div class=\"navbar-header\">\n <a href=\"index.html\"><img src=\"assets/images/logo.png\" alt=\"Ruby Powered\" class=\"logo\"></a>\n <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#navbar\" aria-expanded=\"false\" aria-controls=\"navbar\">\n <span class=\"sr-only\">Toggle navigation</span>\n <span class=\"icon-bar\"></span>\n <span class=\"icon-bar\"></span>\n <span class=\"icon-bar\"></span>\n </button>\n <a class=\"navbar-brand\" href=\"index.html\">#{site_config['nav_heading']}</a>\n </div>\n <div id=\"navbar\" class=\"navbar-collapse collapse\">\n <ul class=\"nav navbar-nav\">)\n page += add_links(page_count)\n page += %(\n </ul>\n </div>\n </div>\n </nav>\n <div class=\"container-fluid\">)\n (0..page_count).map do |i|\n instance_variable_set(\"@page#{ii(i)}\", page)\n end\nend",
"def do_pagination\n @page_number = 1\n if params[:page] && params[:page].to_i > 0\n @page_number = params[:page].to_i\n end\n @pagination = true\n @pagination_options = { :limit => items_per_page, :offset => (@page_number - 1) * items_per_page }\n @pagination_options = {} if params[:all]\n end",
"def pagination_setup(paginator: Org::Paginator, **opt)\n opt[:id] ||= identifier\n # noinspection RubyMismatchedReturnType\n super\n end",
"def paginate_at ()\n return 8\n end",
"def end_paginate! results\n # set the link headers\n link = \"\"\n link += \"<#{@next_page}>; rel=\\\"next\\\"\" unless results.empty? or results.count < @limit\n headers['X-Next-Page'] = @next_page unless results.empty? or results.count < @limit\n if not results.empty? and @page > 1\n link += \", \"\n end\n link += \"<#{@prev_page}>; rel=\\\"prev\\\"\" unless @page == 1\n headers['X-Prev-Page'] = @prev_page unless @page == 1\n headers['Link'] = link\n headers['X-Total-Count'] = @count.to_s\n end",
"def pager; end",
"def default_per_page(count)\n before do\n @current_default_per_page = Kaminari.config.default_per_page\n Kaminari.config.default_per_page = 1\n end\n\n after do\n Kaminari.config.default_per_page = @current_default_per_page\n end\n end",
"def set_params_page\n params[:page] = params.fetch(\"page\"){1}.to_i\n params[:per_page] = params.fetch(\"per_page\"){20}.to_i\n end",
"def after_pagination\n end",
"def crear_paginacion\n @page = params[:page].nil? ? 1 : params[:page].to_i\n @per_page = 3\n end",
"def pdf_header(pdf, report)\n # TODO: when we can use prawn >= 0.7.1, use the pdf.page_number method instead of counting ourselves\n @page_count = 0\n pdf.header [pdf.margin_box.left, pdf.margin_box.top + 10] do\n pdf.font \"Helvetica\" do\n pdf.text report.title, :size => 12, :align => :left\n pdf.move_up(16) # move back up so that the next two lines are more or less even with the title line\n pdf.text Time.now, :size => 8, :align => :right\n pdf.text \"Page: #{@page_count = @page_count + 1}\", :size => 8, :align => :right\n pdf.stroke_horizontal_rule\n end\n end\n end",
"def initialize *args\n super *args\n self.page = (page.to_i < 1 ? 1 : page.to_i)\n end",
"def paginate(collection)\n pages = {}\n\n unless collection.first_page?\n pages[:first] = 1\n pages[:prev] = collection.current_page - 1\n end\n\n unless collection.last_page?\n pages[:last] = collection.total_pages\n pages[:next] = collection.current_page + 1\n end\n\n links = []\n url = request.original_url.sub(/\\?.*$/, '')\n\n pages.each do |k, v|\n new_params = request.query_parameters.merge(page: v)\n links << %(<#{url}?#{new_params.to_param}>; rel=\"#{k}\")\n end\n\n headers['Link'] = links.join(', ') unless links.empty?\n end",
"def pagination_setup(paginator: SearchPaginator, **opt)\n # noinspection RubyMismatchedReturnType\n super\n end",
"def paginates_per(val)\n @_default_per_page = val\n end",
"def per_page\n 10\n end",
"def pagination_info\n @response ? super : {}\n end",
"def pagination_page\n @pagination_page\n end",
"def set_params_page\n params[:page] = params.fetch(\"page\"){1}.to_i\n params[:per_page] = params.fetch(\"per_page\"){20}.to_i\n end",
"def index\n @headers = Header.all\n add_breadcrumb @headers.first.header_lang(current_user), 'headers'\n end",
"def render_headings\n\t\tpage_count.times do |i|\n\t\t\tpage=i+1\n\t\t\tgo_to_page(page)\n\t\t \n\t\t\theadings_box do\n\t\t\t\tfont_size(@header_size) do\n\t\t\t\t\tyield page\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend",
"def paginate_with(kind)\n @pagination ||=\n case kind\n when :paginator then paginator\n when :range then pagination_range\n end\n end",
"def per_page; @opts['per_page']; end",
"def kopal_layout_before_page_header\n\n end",
"def will_paginate_extended(collection, options = {})\n all_options = { style: 'display: inline-block;' }\n all_options = options.merge(all_options)\n\n (will_paginate(collection, all_options) || ''.html_safe) +\n ' Per page: '.html_safe +\n select_tag(:per_page, options_for_select([5, 10, 20], params[:per_page] || 10),\n onchange: \"if(this.value){window.location='?per_page='+this.value;}\")\n end",
"def index\n\n @upcoming_events = Event.upcoming.paginate(page: params[:page])\n @upcoming_header = 'Upcoming Events'\n\n @prev_events = Event.prev.paginate(page: params[:page])\n @prev_header = 'Previous Events'\n \n end",
"def header(format, ntrks, division)\n end",
"def set_pagination\n set_params_page\n @updates = @updates.paginate(:page => params[:page], :per_page => params[:per_page], :order => :created_at.desc)\n set_pagination_buttons(@updates)\n end",
"def index\n @rcontrollers = Rcontroller.page(page).per(per_page)\n set_pagination_header(@rcontrollers,\"rcontrollers\")\n end",
"def set_pagination_info(page_no, page_size, record_count)\n @current_page = page_no\n @page_size = page_size\n @pagination_record_count = record_count\n @page_count = (record_count / page_size.to_f).ceil\n end",
"def paginate(collection , options = {})\n #options = options.merge defaults\n options[:renderer] = BootstrapPagination::Rails\n will_paginate collection, options\n end",
"def index\n @hbanners_pages, @hbanners = paginate(:hbanners,\n\t\t:per_page => 5, \n\t\t:order => 'hbanners.updated_at DESC')\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @hbanners }\n end\n end",
"def provide_navigation_by_all_headings\n # Interface method\n end",
"def set_pagination_buttons(data, options = {})\n return if data.nil? || data.empty?\n\n if data.next_page\n params = {\n :page => data.next_page,\n :per_page => data.per_page\n }.merge(options)\n\n @next_page = \"?#{Rack::Utils.build_query params}\"\n end\n\n if data.previous_page\n params = {\n :page => data.previous_page,\n :per_page => data.per_page\n }.merge(options)\n\n @prev_page = \"?#{Rack::Utils.build_query params}\"\n end\n end",
"def default_page_size\n 50\n end",
"def pagination_links(obj)\n paginator = Paginator.new(obj)\n paginator.current_page = params[:page]\n\n # PAGE BUTTON LINKS (w/Bootstap styling)\n def previous_page_link(paginator)\n content_tag :li do\n link_to({ controller: \"#{controller_name}\", action: \"#{action_name}\", page: paginator.previous_page }, { remote: true, :'aria-label' => 'Previous Page', title: 'Previous Page', data: { toggle: 'tooltip', placement: 'top' } }) do\n raw '<span aria-hidden=\"true\">«</span>'\n end\n end\n end\n\n def previous_group_link(paginator)\n content_tag :li do\n link_to({ controller: \"#{controller_name}\", action: \"#{action_name}\", page: paginator.previous_range_first_page }, { remote: true, :'aria-label' => 'Previous Page Group', title: 'Previous Page Group', data: { toggle: 'tooltip', placement: 'top' } }) do\n raw '<span aria-hidden=\"true\">...</span>'\n end\n end\n end\n\n def page_link(page_num, current_page)\n status = 'active' if page_num == current_page\n\n content_tag :li, class: \"#{status}\" do\n link_to(\"#{page_num}\", controller: \"#{controller_name}\", action: \"#{action_name}\", page: \"#{page_num}\", remote: true)\n end\n end\n\n def next_group_link(paginator)\n content_tag :li do\n link_to({ controller: \"#{controller_name}\", action: \"#{action_name}\", page: paginator.next_range_first_page }, { remote: true, :'aria-label' => 'Next Page Group', title: 'Next Page Group', data: { toggle: 'tooltip', placement: 'top' } }) do\n raw '<span aria-hidden=\"true\">...</span>'\n end\n end\n end\n\n def next_page_link(paginator)\n content_tag :li do\n link_to({ controller: \"#{controller_name}\", action: \"#{action_name}\", page: paginator.next_page }, { remote: true, :'aria-label' => 'Next Page', title: 'Next Page', data: { toggle: 'tooltip', placement: 'top' } }) do\n raw '<span aria-hidden=\"true\">»</span>'\n end\n end\n end\n\n # BUILD PAGINATION LINKS\n # Start Bootstap pagination style\n output = '<nav class=\"text-center\"><ul class=\"pagination\">'\n\n # Show the '<<' aka previous page button\n unless paginator.first_page? || paginator.current_page < 1\n output += previous_page_link(paginator)\n end\n\n # Show the '...' aka previous page group button\n unless paginator.first_page? || paginator.current_page <= paginator.links_per_page\n output += previous_group_link(paginator)\n end\n\n # Show individual page links, stopping at the limit of links per page set by the user\n paginator.current_range_first_page.upto(paginator.last_page_in_current_range) do |page_num|\n output += page_link(page_num, paginator.current_page) unless page_num > paginator.total_pages\n end\n\n # Show the '...' aka next page group button\n unless paginator.next_range_first_page > paginator.total_pages || paginator.last_page?\n output += next_group_link(paginator)\n end\n\n # Show the '>>' aka next page button\n unless paginator.last_page? || paginator.current_page > paginator.total_pages\n output += next_page_link(paginator)\n end\n\n # Append page numbering and end Bootstap pagination style\n output += \"</ul><p><strong>Page:</strong> #{paginator.current_page} / #{paginator.total_pages}</p></nav>\"\n\n # Output the string to the view\n raw output unless paginator.total_pages <= 1\n end",
"def set_paging\n if extjs_paging?\n offset = params[:start].to_i\n @page_limit = params[:limit].to_i\n @current_page = (offset / @page_limit) + 1 # start at page 1\n else\n @page_limit = 20 # sync with ExtJS PagingToolbar configuration in view!\n @current_page = 1\n end\n end",
"def table_pager(type, next_page)\n html = String.new\n html << '<ul class=\"pager\">'\n page = request.id.to_i\n if page > 1\n html << \"<li><a href=\\\"#{url_for(\"#{type}/#{page-1}?#{h request.query_string}\")}\\\">Previous</a></li>\"\n else\n html << '<li class=\"disabled\"><a href=\"#\">Previous</a></li>'\n end\n if next_page\n page = 1 if page < 1\n html << \"<li><a href=\\\"#{url_for(\"#{type}/#{page+1}?#{h request.query_string}\")}\\\">Next</a></li>\"\n else\n html << '<li class=\"disabled\"><a href=\"#\">Next</a></li>'\n end\n html << \"</ul>\"\n html << \"<p><a href=\\\"#{url_for(\"#{type}/csv?#{h request.query_string}\")}\\\">CSV Format</a></p>\"\n end",
"def page_at=(setting)\n @page_at = setting == :auto ? output_rows - 2 : setting\n end",
"def typus_paginate(items, options)\n render \"admin/resources/pagination\"\n end",
"def create_pagination(pages,controller,current_page)\n pagination = \"\"\n params[:controller] = controller\n current_page = 1 if current_page.nil?\n if pages.length > 1\n pagination << link_to('<', {:params => params.merge('page' => pages.first)}) << \" \"\n end\n if pages.length > 1\n pages.each do |page|\n if (page.number < current_page.to_i+6) && (page.number > current_page.to_i-6)\n if current_page.to_i == page.number\n pagination << page.number.to_s << \" \"\n else\n pagination << link_to(page.number, {:params => params.merge('page' => page)}) << \" \"\n end\n end\n end\n end\n if pages.length > 1\n pagination << link_to('>', {:params => params.merge('page' => pages.last)}) << \" \"\n end\n return pagination\n end",
"def default_page_size\n 10\n end",
"def per_page\n 6\n end",
"def paginations(collection)\n content_tag :div, class: 'page-wrapper' do\n if collection.next_page\n will_paginate collection, renderer: BootstrapPagination::Rails\n else\n content_tag :div, \"没有更多了\", class: 'no-more'\n end\n end\n end",
"def extract_pagination(options)\n if (value = options.delete(AUTO_PAGINATION))\n api.auto_pagination = value\n end\n end",
"def pagination_controls\n <<-HTML.strip_heredoc\n <div class='table-me-pagination-controls'>\n <a href=\"#{prev_page_url}\" class='previous'>« Prev</a> #{pagination_number_list} <a href=\"#{next_page_url}\" class='next'>Next »</a>\n </div>\n HTML\n end",
"def page_header(header = nil)\n content_tag(:div, class: 'page-header') do\n content_tag(:h1) do\n content_tag(:span, header || t('.header')) +\n content_tag(:div, class: 'pull-right') do\n yield if block_given?\n end\n end\n end\n end",
"def paginate_path(site, num_page); end",
"def paginate\n paginated?? self : page(1)\n end",
"def info_page(collection, style = nil)\n if collection.page(1).length > 0\n html = \"#{t('views.pagination.displaying')} #{collection.offset_value + 1} -\n #{collection.offset_value + collection.length}\"\n html << \" #{t('of')} #{collection.total_count}\"\n\n content_tag :div, html, class: 'pagination', style: style\n end\n end",
"def set_pagination_buttons(data, options = {})\n return if data.nil? || data.empty?\n\n if data.next_page\n params = {\n :page => data.next_page,\n :per_page => data.per_page\n }.merge(options)\n\n @next_page = \"?#{Rack::Utils.build_query params}\"\n end\n\n if data.previous_page\n params = {\n :page => data.previous_page,\n :per_page => data.per_page\n }.merge(options)\n\n @prev_page = \"?#{Rack::Utils.build_query params}\"\n end\n end",
"def first_page_number\n 1\n end",
"def paginate; false; end",
"def paginate_control collection\n previous_query = '?' + URI.escape(request.params.update('page' => collection.page - 1).map{|*a| a.join('=')}.join('&')).to_s\n previous_url = URI.parse(request.url).merge(previous_query).to_s\n\n next_query = '?' + URI.escape(request.params.update('page' => collection.page + 1).map{|*a| a.join('=')}.join('&')).to_s\n next_url = URI.parse(request.url).merge(next_query).to_s\n\n haml(paginate_control_haml, locals: {collection: collection, previous_url: previous_url, next_url: next_url}, layout: false)\n end",
"def set_activities_for_pagination\n \n end",
"def link_header n\n html = \" \"\n n.times { html += '<th class=\"linkheader\"></th>' }\n html.html_safe\n end",
"def auto_paginate\n ENV['KARATEKIT_AUTO_PAGINATE']\n end",
"def set_params\n params[:page] ||= 1\n params[:per_page] ||= 15\n params[:order_by] ||= \"id\"\n params[:order_direction] ||= \"ASC\"\n end",
"def paginate(objects, options = {})\n options.reverse_merge!( theme: 'twitter-bootstrap-3' )\n\n content_tag :div, class: 'pagination-wrap' do\n super( objects, options )\n end\n end",
"def set_page_size\n unless params[:per_page].blank?\n cookies[:per_page] = params[:per_page]\n @per_page = params[:per_page]\n else\n @per_page = cookies[:per_page] || 20\n end\n end",
"def pos_partial_page_header\n pos_fil_header + 4\n end",
"def paginator\n per_page = @ruhoh.db.config(\"paginator\")[\"per_page\"]\n current_page = master.context[\"page\"]['current_page'].to_i rescue 0\n current_page = current_page.zero? ? 1 : current_page\n offset = (current_page-1)*per_page\n\n page_batch = all[offset, per_page]\n raise \"Page does not exist\" unless page_batch\n page_batch\n end",
"def render_pagination\n num_pages = Document.num_results.to_f / @per_page.to_f\n num_pages = Integer(num_pages.ceil)\n return '' if num_pages == 0\n\n content_tag :div, :class => 'ui-grid-c' do\n content = content_tag(:div, :class => 'ui-block-a') do\n if @page != 0\n page_link(I18n.t(:'search.index.first_button'), 0, 'back')\n end\n end\n content << content_tag(:div, :class => 'ui-block-b') do\n if @page != 0\n page_link(I18n.t(:'search.index.previous_button'), @page - 1, 'arrow-l')\n end\n end\n\n content << content_tag(:div, :class => 'ui-block-c') do\n if @page != (num_pages - 1)\n page_link(I18n.t(:'search.index.next_button'), @page + 1, 'arrow-r', true)\n end\n end\n content << content_tag(:div, :class => 'ui-block-d') do\n if @page != (num_pages - 1)\n page_link(I18n.t(:'search.index.last_button'), num_pages - 1, 'forward', true)\n end\n end\n\n content\n end\n end",
"def index\n @headings = @project.headings.order(:name).page params[:page]\n end",
"def pagination!\n parameter :page, :integer, :required => false, :default => 1, :paramType => \"query\"\n parameter :per_page, :integer, :required => false, :default => 30, :paramType => \"query\", \n :allowed => 1..1000\n end",
"def pagination_range\n case JSONAPI.configuration.default_paginator\n when :paged\n number = page_params['number'].to_i.nonzero? || 1\n size = page_params['size'].to_i.nonzero? || JSONAPI.configuration.default_page_size\n (number - 1) * size..number * size - 1\n when :offset\n offset = page_params['offset'].to_i.nonzero? || 0\n limit = page_params['limit'].to_i.nonzero? || JSONAPI.configuration.default_page_size\n offset..offset + limit - 1\n else\n paginator.pagination_range(page_params)\n end\n end",
"def pagination(paginated_collection)\n Pagination::Template.new(paginated_collection).render \n end"
] | [
"0.754653",
"0.74930876",
"0.6708229",
"0.6503024",
"0.6232348",
"0.6212366",
"0.62021303",
"0.6172817",
"0.6148266",
"0.61431813",
"0.61431813",
"0.6095978",
"0.6084311",
"0.6083161",
"0.59970886",
"0.5986855",
"0.59765905",
"0.5972433",
"0.5948929",
"0.5921677",
"0.5903181",
"0.5895889",
"0.5893526",
"0.5892559",
"0.5882265",
"0.5868678",
"0.58684325",
"0.5865839",
"0.5856466",
"0.5856466",
"0.5786315",
"0.5781516",
"0.57659864",
"0.57447124",
"0.57091963",
"0.56996596",
"0.56953055",
"0.56949484",
"0.56593496",
"0.5658465",
"0.5657275",
"0.5645501",
"0.56309646",
"0.5624621",
"0.5616264",
"0.56134874",
"0.56056535",
"0.560532",
"0.56032145",
"0.5602116",
"0.55979526",
"0.5594095",
"0.5592115",
"0.55885214",
"0.55839336",
"0.5579593",
"0.55716425",
"0.557022",
"0.556793",
"0.55604005",
"0.5558969",
"0.55554223",
"0.5553526",
"0.55392206",
"0.5535261",
"0.55336714",
"0.5526805",
"0.5522249",
"0.5517242",
"0.550813",
"0.5503337",
"0.5503324",
"0.5498399",
"0.549814",
"0.54943043",
"0.5493501",
"0.5490344",
"0.54853326",
"0.54849565",
"0.5482181",
"0.54802233",
"0.545937",
"0.54554296",
"0.5454955",
"0.54541004",
"0.5441398",
"0.54412484",
"0.5439965",
"0.54350734",
"0.5430938",
"0.5424529",
"0.54190415",
"0.5414807",
"0.5406294",
"0.5403727",
"0.54033357",
"0.54023427",
"0.5397025",
"0.5395107",
"0.5393785"
] | 0.7530348 | 1 |
Store user in session to simulate logging in | def login(user)
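# Test helper: writes the user's id into session[:uid] so later requests behave as if the user were logged in.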
request.session[:uid] = user.id
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def user_log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n reset_session\n set_user_id_in_session(user.id)\n # Guard against session replay attacks\n session[:session_token] = user.session_token\n end",
"def login_as(user)\n session[:user_id] = user.id\n end",
"def log_in(user)\n session[:user_id] \t = user.id\n session[:user_name] = user.name\n session[:user_email] = user.email\n end",
"def login!\n session[:user_id] = @user.id\n end",
"def log_in_as(user)\n puts 'logging in!!'\n session[:name] = user.name\n end",
"def log_in(user_id)\n session[:user_id] = user_id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n\n end",
"def login user\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:user_id] = user.id\n end",
"def log_in_as(user)\n session[:regular_user_id] = user.id\n end",
"def set_user\n require_logged_in\n @user = User.find_by(id: session[:user_id])\n end",
"def login(user)\n session[:user_id] = user.id\n end",
"def login_from_session\n self.current_user = User.find_by_id( session[ :user_id ] ) if session[ :user_id ]\n end",
"def login_user\n\t @user_session = UserSession.new\t \n\tend",
"def set_user_session(user)\n session[:logged_in_users_id] = user.id\n end",
"def login_user(user)\n session[\"app_user\"] = user\n end",
"def login_user\n session[:user_id] = @user.id\n end",
"def login(user)\n session[:user_id] = user.id\n end",
"def login!(user, options = {})\n session[:session_token] = user.persistence_token\n session[:active_at] = Time.now\n @current_user = user\n end",
"def login(user)\n \tsession[:user_id] = user.id\n end",
"def set_logged_in_user(user)\n set_temporary_logged_in_user(user)\n session[:user_id] = user.id\n set_unlogged_in_user(nil)\n end",
"def sign_in(user)\n session[:session_token] = SecureRandom.urlsafe_base64\n user.update!(session_token: session[:session_token])\n end",
"def login!(user)\n user.reset_session_token!\n # curr_session_token == user.session_token\n # sets user.curr_session_token and persists to UserSessionsOwnership table\n user.set_curr_session_token\n @current_user = user # set current_user upon login\n # session[:session_token] = user.session_token\n session[:session_token] = user.curr_session_token\n end",
"def log_in_as(user)\n session[:admin] = user.id\n end",
"def handle_login(u)\n session[:person] = u.id\n end",
"def login_as(user)\n session[:user_id] = users(user).id\n end",
"def login_as(user)\n Session.create(users(user))\n end",
"def login_from_session\n self.current_user = User.find_by_id(session[:user_id]) if session[:user_id]\n end",
"def set_session\n\t #no idea why I need this, but login form seems to break otherwise\n\t session[:email] = session[:email]\n\t session[:password] = session[:password]\n\t end",
"def login( user )\n session[:user_id] = user ? user.id : nil\n end",
"def login_from_session\n self.current_user = User.find(session[:user_id]) if session[:user_id]\n end",
"def login_from_session\n self.current_user = User.find(session[:user_id]) if session[:user_id]\n end",
"def login_as(user)\n UserSession.create(users(user))\n end",
"def sign_in user\n \t# takes the user object and sets session data using said object\n \tsession[:user_id] = user.id\n \tself.current_user = user\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def login_user(user)\n session[:current_user_id] = user.id\n end",
"def login_as(user)\n @request.session[:user_id] = users(user).id # obtained from fixture\n end",
"def login_as(user)\n @request.session[:user] = user ? users(user).id : nil\n end",
"def login!(user) #sets congruency between session token of user and session\n @current_user = user\n session[:session_token] = user.session_token\n end",
"def log_in_as(user)\n session[:user_id]=user.id\n end",
"def login_user(user = gen_user)\n session[:user_id] = user.id\n return user\n end",
"def login_as(user)\n @request.session[:user_id] = user.id\n end",
"def set_user\n UserInfo.current_user = session[:user]\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def login(user)\n @request.session[:user] = user\n end",
"def login_from_session\n self.current_user = User.find_by_id(session[:user_id]) if session[:user_id]\n end",
"def login_from_session\n self.current_user = User.find(session[:user_id]) if session[:user_id]\n end",
"def set_session_for(user)\n UserSession.create(user)\n end",
"def signin(user)\n session[:user_id] = user.id\n end",
"def set_session user\n session[:user_id] = user.id\n session[:user_fullname] = user.fullname\n session[:user_email] = user.email\n session[:user_access] = user.access\n session[:company_id] = user.company_id\n end",
"def log_in(user)\n current_user = user\n session[:session_token] = user.reset_session_token!\n end",
"def log_in_user!(user)\n session[:session_token] = user.reset_session_token!\n end",
"def log_in_user!(user)\n session[:session_token] = user.reset_session_token!\n end",
"def login_user!(user)\n user.reset_session_token!\n session[:session_token] = user.session_token\n end",
"def login_as(id)\n session['email'] = User.find(id).email\n build_user\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def sign_in(user)\n session[:user_id] = user.id\n end",
"def set_user_session\n UserSession.current_user = current_user\n end",
"def login!(session)\n session[:user_id] = self.id\n end",
"def login(user)\n session[:user_id] = user.try(:id)\n @current_user = user\n end",
"def login_from_session\nself.current_user = User.find_by_id(session[:user_id]) if session[:user_id]\nend",
"def sign_in(user)\n # Issues new random piece of text (aka\n # remember_token) to user\n remember_token = User.new_remember_token\n # Giving user plain text token\n cookies.permanent[:remember_token] = remember_token\n # Hashing the version of the remember_token which the host maintains.\n user.update_attribute(:remember_token, User.hash(remember_token))\n #sets the one who signed in (user) to current user\n self.current_user=user\n end",
"def login!(session)\n session[:user_id] = id\n end",
"def login!(session)\n session[:user_id] = id\n end",
"def login!(session) \n session[:user_id] = id \n end",
"def log_in_user_session_only(user_id)\n session[USER_SESSION_KEY] = user_id\n end",
"def login\n session[:user_id] = nil\n end",
"def login!(user)\n @current_user = user\n session[:session_token] = user.session_token\n end",
"def set_user\n User.before_set_user # for testing only\n\n session[\"user\"] = User.authentication_service.get_user_session(params[:id])\n redirect_to \"/test/users\"\n end",
"def set_user\n @user = User.find_by(username: params[:user][:username])\n session[:user_id] = @user.id\n end",
"def sign_in(user)\n sign_out\n\n user.touch(:last_login_at)\n user_session = user.sessions.new(ip_address: request.remote_ip, user_agent: request.user_agent)\n if user_session.save\n user.password_resets.delete_all\n @current_session = user_session\n end\n end",
"def start_session(user)\n session[:user] = User.find(user).id\n end",
"def login!(user)\n # Store both local ID and horizon_borrower_id\n # to try and prevent mismatches from cookies.\n session[:current_user_id] = user.id\n session[:current_user_horizon_id] = user.horizon_borrower_id\n\n # Blacklight requires this to migrate guest bookmarks over\n # to permanent bookmarks where needed. It's not documented\n # well exactly how this works with non-Devise auth, but appears\n # to be working.\n transfer_guest_user_actions_to_current_user\n end",
"def sign_in\n session[:user_id] = @user.id\n end",
"def login_as(user)\n unless user.is_a?(User)\n user = get_user(user)\n end\n clear_cookies\n GDS::SSO.test_user = user\n end",
"def set_user\n @user = session[:current_user]\n unless @user\n redirect_to home_path\n end\n end",
"def login!(session)\n\t\tsession[:user_id] = id\n\tend"
] | [
"0.79796517",
"0.7946116",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7914858",
"0.7907733",
"0.7877703",
"0.7869816",
"0.78360236",
"0.7821741",
"0.78216183",
"0.7799445",
"0.77953625",
"0.77770406",
"0.77718073",
"0.77718073",
"0.77718073",
"0.77718073",
"0.77718073",
"0.7753755",
"0.77364755",
"0.7722243",
"0.7703729",
"0.76961815",
"0.769094",
"0.7674927",
"0.76709306",
"0.76686037",
"0.76459545",
"0.7643949",
"0.7636595",
"0.7617066",
"0.76102805",
"0.7589444",
"0.7577249",
"0.7575938",
"0.75666887",
"0.756119",
"0.75564915",
"0.7555197",
"0.7553694",
"0.7553694",
"0.75533205",
"0.7549594",
"0.7540032",
"0.7533719",
"0.752772",
"0.7518103",
"0.7510889",
"0.74972624",
"0.74961853",
"0.74946773",
"0.74832565",
"0.7480396",
"0.7480377",
"0.7471421",
"0.74674964",
"0.7464991",
"0.744107",
"0.7433284",
"0.74189574",
"0.74172693",
"0.74172693",
"0.7415958",
"0.7405464",
"0.7404551",
"0.7404551",
"0.7404551",
"0.7404551",
"0.73971796",
"0.7393851",
"0.73818666",
"0.7370891",
"0.73685485",
"0.7363422",
"0.7363422",
"0.73616445",
"0.73605627",
"0.7359364",
"0.7359236",
"0.73515725",
"0.73494947",
"0.7346376",
"0.7339453",
"0.7337934",
"0.7329621",
"0.7327605",
"0.732758",
"0.73207223"
] | 0.75240034 | 58 |
sha1_file.c unpack_object_header_buffer, unpack-objects.c unpack_one | def parse_type_and_size
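# Reads a packfile object header: the first byte packs the object type in bits 4-6 and the 4 low size bits,
# and its MSB flags whether continuation bytes follow.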
hdr = byte
hdr_size = 1
continue = (hdr & 0b10000000)
type = (hdr & 0b01110000) >> 4
size = (hdr & 0b00001111)
size_len = 4
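# Each continuation byte contributes 7 more size bits, shifted in above the bits already read (little-endian groups).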
while continue != 0
hdr = byte
hdr_size += 1
continue = (hdr & 0b10000000)
size += (hdr & 0b01111111) << size_len
size_len += 7
end
return [type, size, hdr_size]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def unpack(data); end",
"def unpack(binary)\n end",
"def unpack_header(header)\n # fixed fields\n self.version = header.slice!(0)\n self.type = header.slice!(0)\n self.seq_no = header.slice!(0)\n self.flags = header.slice!(0)\n self.session_id = header.slice!(0..3)\n self.length = header.slice!(0..3)\n return(nil)\n end",
"def get_object(id)\n path = object_path(id)\n\n if File.exists?(path)\n buf = open(path, \"rb\") { |f| f.read }\n\n raise \"not a loose object: #{id}\" if not legacy_loose_object?(buf)\n\n header, content = Zlib::Inflate.inflate(buf).split(/\\0/, 2)\n type, size = header.split(/ /, 2)\n\n raise \"bad object: #{id}\" if content.length != size.to_i\n else\n content, type = get_object_from_pack(id)\n end\n\n return type, content\n end",
"def build_object(type, content)\n # taken from http://schacon.github.io/gitbook/7_how_git_stores_objects.html\n header = \"#{type} #{content.size}\\0\"\n store = header + content\n [Digest::SHA1.hexdigest(store), Zlib::Deflate.deflate(store)]\n end",
"def unpack_one format\n @b.unpack_one format\n end",
"def file_sha1\n Digest::SHA1.file(self).hexdigest\n end",
"def unpack1 format\n unpack(format).first\n end",
"def put_raw_object(content, type)\n size = content.length.to_s\n LooseStorage.verify_header(type, size)\n \n header = \"#{type} #{size}\\0\"\n store = header + content\n \n sha1 = Digest::SHA1.hexdigest(store)\n path = @directory+'/'+sha1[0...2]+'/'+sha1[2..40]\n \n if !File.exists?(path)\n content = Zlib::Deflate.deflate(store)\n \n FileUtils.mkdir_p(@directory+'/'+sha1[0...2])\n File.open(path, 'w') do |f|\n f.write content\n end\n end\n return sha1\n end",
"def unpack(p0) end",
"def read_binary_object(fname,fd)\n # first: read the marker byte\n buff = fd.read(1)\n\n object_length = buff.unpack(\"C*\")\n object_length = object_length[0] & 0xF\n\n buff = buff.unpack(\"H*\")\n object_type = buff[0][0].chr\n\n if(object_type != \"0\" && object_length == 15) then\n object_length = read_binary_object(fname,fd)\n object_length = object_length.value\n end\n\n retval = nil\n case object_type\n when '0' then # null, false, true, fillbyte\n retval = read_binary_null_type(object_length)\n when '1' then # integer\n retval = read_binary_int(fname,fd,object_length)\n when '2' then # real\n retval = read_binary_real(fname,fd,object_length)\n when '3' then # date\n retval = read_binary_date(fname,fd,object_length)\n when '4' then # data\n retval = read_binary_data(fname,fd,object_length)\n when '5' then # byte string, usually utf8 encoded\n retval = read_binary_string(fname,fd,object_length)\n when '6' then # unicode string (utf16be)\n retval = read_binary_unicode_string(fname,fd,object_length)\n when 'a' then # array\n retval = read_binary_array(fname,fd,object_length)\n when 'd' then # dictionary\n retval = read_binary_dict(fname,fd,object_length)\n end\n\n return retval\n end",
"def object_for_hash(given_hash)\n @opener.open(name, \"r\") do |fp|\n given_hash.force_encoding(\"ASCII-8BIT\") if given_hash.respond_to?(:force_encoding)\n entry = nil\n if index\n starting_at = index.offset_for_hash(given_hash)\n return PackFileEntry.at(starting_at, fp).to_raw_object\n else\n starting_at = cached_offset(given_hash) || DATA_START_OFFSET\n fp.seek(starting_at, IO::SEEK_SET)\n while fp.tell < @end_of_data\n entry = PackFileEntry.read(fp)\n cache_entry(entry)\n return entry.to_raw_object if entry.hash_id == given_hash\n end\n end\n end\n nil\n end",
"def parse obj\nstr = obj.to_s.sub %r/\\Aurn:uuid:/, ''\nstr.gsub! %r/[^0-9A-Fa-f]/, ''\n# raw = str[0..31].lines.to_a.pack 'H*'\n# lines not supported before ruby 1.8.7, and that's not there on heroku\nraw = str[0..31].split(/\\n/).pack 'H*'\nret = new raw\nret.freeze\nret\nend",
"def get(id)\n raise ArgumentError, 'Invalid id given' if !(String === id)\n\n if id =~ SHA_PATTERN\n raise ArgumentError, \"Sha too short: #{id}\" if id.length < 5\n\n trie = @objects.find(id)\n raise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n return trie.value if !trie.empty?\n elsif id =~ REVISION_PATTERN\n list = git_rev_parse(id).split(\"\\n\") rescue nil\n raise NotFound, \"Revision not found: #{id}\" if !list || list.empty?\n raise NotFound, \"Revision is ambiguous: #{id}\" if list.size > 1\n id = list.first\n\n trie = @objects.find(id)\n raise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n return trie.value if !trie.empty?\n else\n raise ArgumentError, \"Invalid id given: #{id}\"\n end\n\n @logger.debug \"gitrb: Loading #{id}\"\n\n path = object_path(id)\n if File.exists?(path) || (glob = Dir.glob(path + '*')).size >= 1\n if glob\n raise NotFound, \"Sha is ambiguous: #{id}\" if glob.size > 1\n path = glob.first\n id = path[-41..-40] + path[-38..-1]\n end\n\n buf = File.open(path, 'rb') { |f| f.read }\n\n raise NotFound, \"Not a loose object: #{id}\" if !legacy_loose_object?(buf)\n\n header, content = Zlib::Inflate.inflate(buf).split(\"\\0\", 2)\n type, size = header.split(' ', 2)\n\n raise NotFound, \"Bad object: #{id}\" if content.length != size.to_i\n else\n trie = @packs.find(id)\n\traise NotFound, \"Object not found: #{id}\" if trie.empty?\n\traise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n id = trie.key\n pack, offset = trie.value\n content, type = pack.get_object(offset)\n end\n\n @logger.debug \"gitrb: Loaded #{type} #{id}\"\n\n set_encoding(id)\n object = GitObject.factory(type, :repository => self, :id => id, :data => content)\n @objects.insert(id, object)\n object\n end",
"def decompressor; end",
"def decompress(data); end",
"def pack\n end",
"def digestFile(filename)\n\tfh = File.open(filename)\n\tarray = String.new\n\tpreparse = true\n\tfh.each_line do |line|\n\t\tif preparse\n\t\t\tif line =~ /\\{/\n\t\t\t\tpreparse = false\n\t\t\t\tarray << line\n\t\t\tend\n\t\telse\n\t\t\t#Sub numberlongs\n\t\t\tif line.include? \"NumberLong\\(\"\n\t\t\t\tline.slice!(/NumberLong\\(/)\n\t\t\t\tline.slice!(/\\)/)\n\t\t\tend\n\n\t\t\t#ObjectId(\"4ef4af0963389003f300c2e7\"),\n\t\t\tif line.include? \"ObjectId\"\n\t\t\t\tline = line.gsub(\"ObjectId\\(\\\"\", \"\\\"ObjectId\\(\")\n\t\t\t\tline = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n\t\t\tend\n\n\t\t\t#Timestamp(10000, 27),\n\t\t\tif line.include? \": Timestamp\\(\"\n\t\t\t\tline = line.gsub(\"Timestamp\\(\", \"\\\"Timestamp\\(\")\n\t\t\t\tline = line.gsub(\"\\)\", \"\\)\\\"\")\n\t\t\tend\n\t\t\t#ISODate(\"2012-01-26T00:00:00Z\")\n\t\t\tif line.include? \": ISODate\\(\\\"\"\n\t\t\t\tline = line.gsub(\"ISODate\\(\\\"\", \"\\\"ISODate\\(\")\n\t\t\t\tline = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n\t\t\tend\n #BinData\n if line.include? \": BinData\\(\"\n line = line.gsub(\"BinData\\(\", \"\\\"BinData\\(\")\n line = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n line = line.gsub(\",\\\"\", \",\")\n end\n if line.include? \"\\\" : \\/\"\n line = line.gsub(\"\\\" : \\/\", \"\\\" : \\\"\\/\")\n line = line.gsub(\"\\/,\", \"\\/\\\",\")\n end\n\t\t\tif line !~ /bye/\n\t\t\t array << line\n\t\t\tend\n\t\tend\n\tend\n\tfh.close\n\tdoc = Yajl::Parser.parse(array)\n\treturn doc\nend",
"def header(buf)\n peek = buf.readbyte(0)\n\n header = {}\n header[:type], type = HEADREP.find do |_t, desc|\n mask = (peek >> desc[:prefix]) << desc[:prefix]\n mask == desc[:pattern]\n end\n\n fail CompressionError unless header[:type]\n\n header[:name] = integer(buf, type[:prefix])\n\n case header[:type]\n when :indexed\n fail CompressionError if (header[:name]).zero?\n header[:name] -= 1\n when :changetablesize\n header[:value] = header[:name]\n else\n if (header[:name]).zero?\n header[:name] = string(buf)\n else\n header[:name] -= 1\n end\n header[:value] = string(buf)\n end\n\n header\n end",
"def sha1; end",
"def sha1; end",
"def read_objects(file, objects)\n objects.each do |data_type|\n num = DataNumber.new.read(file).data\n\n # use i to indentify the type of object.\n num.times do ||\n _obj = data_type.new.read(file)\n end\n end\n end",
"def inflate_object(object)\n z = Zlib::Inflate.new()\n object[:data] = z.inflate(object[:data])\n z.close\n\n return object\n end",
"def digest_sha1(*files)\n files.flatten.collect { |file| \n File.exists?(file) ? Digest::SHA1.hexdigest(File.read(file)) : nil\n }\n end",
"def parse_body(buffer)\n @id = shift_short(buffer)\n while(buffer.bytesize>0)\n @return_codes << shift_byte(buffer)\n end\n # puts \"subacks #{@return_codes.inspect}\"\n end",
"def unpack_body(body)\n # fixed fields\n self.flags = body.slice!(0)\n self.authen_method = body.slice!(0)\n self.priv_lvl = body.slice!(0)\n self.authen_type = body.slice!(0)\n self.authen_service = body.slice!(0)\n self.user_len = body.slice!(0)\n self.port_len = body.slice!(0)\n self.rem_addr_len = body.slice!(0)\n self.arg_cnt = body.slice!(0)\n \n # variable fields\n @arg_lens = (body.slice!(0..(@arg_cnt - 1))).unpack('C*') if (@arg_cnt != 0)\n @user = body.slice!(0..(@user_len - 1)) if (@user_len != 0)\n @port = body.slice!(0..(@port_len - 1)) if (@port_len != 0)\n @rem_addr = body.slice!(0..(@rem_addr_len - 1)) if (@rem_addr_len != 0)\n \n if (self.arg_cnt != 0)\n @args = []\n @arg_lens.each {|x| @args.push( body.slice!( 0..(x - 1) ) )}\n end\n \n return(nil)\n end",
"def read_header(absfilepath)\n tmp_filename= File.basename(absfilepath)\n \n case File.basename(absfilepath)\n when /^P.{5}\\.7$|^I\\..{3}/\n # check for \n # Try reading Pfiles or Genesis I-Files with GE's printraw\n # printraw works on the new waisman p-files\n # rdgehdr works on wimr p-files, and old waisman p-files \n @current_hdr_reader = PRINTRAW\n #puts \"aaaaaaa absfilepath=\"+absfilepath\n header = `#{PRINTRAW} '#{absfilepath}' 2> /dev/null`\n #header = `#{RDGEHDR} #{absfilepath}`\n # puts \"bbbbb pfile header=\"+header\n header = header.encode(\"UTF-8\", :invalid => :replace, :undef => :replace, :replace => \"\").force_encoding('UTF-8')\n if ( header.chomp != \"\" and\n header.length > MIN_HDR_LENGTH )\n @current_hdr_reader = nil\n return [ header, PRINTRAW ]\n end\n when /^ScanArchive_.{10,}\\.h5\\.json$/\n json_file = File.read(absfilepath)\n data_hash = JSON.parse(json_file)\n ####header =\"se_desc = \"+data_hash[\"SERIES INFORMATION\"][\"Series Desc\"]+\"\n####image_uid = \"+data_hash[\"RHUSER and OPUSER INFORMATION\"][\"imagehead.image_uid\"]+\"\\n\"\n ## if ( header.chomp != \"\" )#and header.length > MIN_HDR_SUMMARY_LENGTH )\n if !data_hash.nil?\n @current_hdr_reader = nil\n # puts data_hash.to_s\n # FAILING IN HEADER READ - WHERE IS THE HEADER TRYING TO GET READ? NOT STOPING AT PRINTRAW_SUMMARY\n # CAN THE data_hash be passed instead of the header, and then read in the header_json_reader?\n return [data_hash, nil] ###PRINTRAW_H5_JSON ]\n ###return [ header, PRINTRAW_H5_JSON ]\n end\n # need to read json\n # create header\n when /^P.{5}\\.7\\.summary/\n # check for \n @current_hdr_reader = PRINTRAW_SUMMARY\n # puts \"aaaaaaa summary absfilepath=\"+absfilepath\n header = `#{PRINTRAW_SUMMARY} '#{absfilepath}' 2> /dev/null`\n header = header.encode(\"UTF-8\", :invalid => :replace, :undef => :replace, :replace => \"\").force_encoding('UTF-8')\n if ( header.chomp != \"\" and\n header.length > MIN_HDR_SUMMARY_LENGTH )\n @current_hdr_reader = nil\n return [ header, PRINTRAW_SUMMARY ]\n end\n\n # Try reading Pfiles or Genesis I-Files with GE's rdgehdr -- rdgehdr newer version needs macos 10.8, adrcdev2 = 10.7.5 - \n # works on old headers, not on new header format\n ###@current_hdr_reader = RDGEHDR\n ###header = `#{RDGEHDR} '#{absfilepath}' 2> /dev/null`\n #header = `#{RDGEHDR} #{absfilepath}`\n ###if ( header.chomp != \"\" and\n ### header.length > MIN_HDR_LENGTH )\n ### @current_hdr_reader = nil\n ### return [ header, RDGEHDR ]\n ### end\n else\n # Try reading with RubyDICOM\n @current_hdr_reader = RUBYDICOM_HDR\n ####header = DICOM::DObject.new(absfilepath) # changing from dicom 0.8.0 to 0.9.5\n header = DICOM::DObject.read(absfilepath)\n\n if defined? header.read_success && header.read_success\n @current_hdr_reader = nil\n return [header, RUBYDICOM_HDR] \n end\n \n # Try reading with AFNI's dicom_hdr\n @current_hdr_reader = DICOM_HDR\n header = `#{DICOM_HDR} '#{absfilepath}' 2> /dev/null`\n #header = `#{DICOM_HDR} #{absfilepath}`\n if ( header.index(\"ERROR\") == nil and \n header.chomp != \"\" and \n header.length > MIN_HDR_LENGTH )\n @current_hdr_reader = nil\n return [ header, DICOM_HDR ]\n end\n end\n\n @current_hdr_reader = nil\n return [ nil, nil ]\n end",
"def parse\n _magic, command, length, checksum = @buffer.unpack('a4A12Va4')\n payload = @buffer[HEAD_SIZE...HEAD_SIZE + length]\n if Digest::SHA256.digest(Digest::SHA256.digest(payload))[0...4] != checksum\n return if payload.size < length\n raise 'TODO: handle checksum error'\n end\n @buffer = @buffer[HEAD_SIZE + length..-1] || ''\n handler.send \"handle_#{command}\", payload\n !@buffer.empty?\n end",
"def sha1(path)\n result = nil\n chunk_size = 10240\n File.open(path, \"r\") do |file|\n sha1 = Digest::SHA1.new\n\n while true\n chunk = file.read chunk_size\n break if ! chunk\n sha1.update chunk\n end\n result = sha1.hexdigest\n end\n result\n end",
"def load_checksums(dobj)\n log ' - load_checksums()'\n dobj.object_files.each { |file| file.provider_md5 = file.md5 }\n end",
"def git_object(hash)\n return `git cat-file -p #{hash}`\nend",
"def demarshal_object(blob)\n Marshal.load(blob)\n end",
"def unpack_body(body)\n # fixed fields\n self.status = body.slice!(0)\n self.flags = body.slice!(0)\n self.server_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1) \n \n # variable-length fields\n self.server_msg = body.slice!(0..(@server_msg_len - 1)) if (@server_msg_len != 0)\n self.data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n return(nil)\n end",
"def sha1=(_); end",
"def sha1\n @sha1 ||= digest(path, :sha1)\n end",
"def sha1(data, c: true)\n if c && $linux && File.file?(PATH_SHA1)\n File.binwrite(\"util/#{HASH_INPUT_FN}\", data)\n code = shell(\"./util/sha1 ./util/#{HASH_INPUT_FN} ./util/#{HASH_OUTPUT_FN}\")\n return nil if !code\n hash = File.binread(\"util/#{HASH_OUTPUT_FN}\")\n FileUtils.rm([\"./util/#{HASH_INPUT_FN}\", \"./util/#{HASH_OUTPUT_FN}\"])\n hash\n else\n Digest::SHA1.digest(data)\n end\nrescue => e\n lex(e, 'Failed to compute the SHA1 hash')\n nil\nend",
"def get_object(resource, type, id, location = 0) #:yields: data_object\n header = {\n 'Accept' => mimemap.keys.join(',')\n }\n\n data = {\n 'Resource' => resource,\n 'Type' => type,\n 'ID' => id,\n 'Location' => location.to_s\n }\n\n response = request(@urls['GetObject'], data, header)\n results = block_given? ? 0 : []\n\n if response['content-type'] && response['content-type'].include?('text/xml')\n # This probably means that there was an error.\n # Response parser will likely raise an exception.\n rr = @response_parser.parse_object_response(response.body)\n return rr\n elsif response['content-type'] && response['content-type'].include?('multipart/parallel')\n content_type = process_content_type(response['content-type'])\n\n# TODO: log this\n# puts \"SPLIT ON #{content_type['boundary']}\"\n boundary = content_type['boundary']\n if boundary =~ /\\s*'([^']*)\\s*/\n boundary = $1\n end\n parts = response.body.split(\"\\r\\n--#{boundary}\")\n\n parts.shift # Get rid of the initial boundary\n\n# TODO: log this\n# puts \"GOT PARTS #{parts.length}\"\n\n parts.each do |part|\n (raw_header, raw_data) = part.split(\"\\r\\n\\r\\n\")\n\n# TODO: log this\n# puts raw_data.nil?\n next unless raw_data\n\n data_header = process_header(raw_header)\n data_object = RETS::DataObject.new(data_header, raw_data)\n\n if block_given?\n yield data_object\n results += 1\n else\n results << data_object\n end\n end\n else\n info = {\n 'content-type' => response['content-type'], # Compatibility shim. Deprecated.\n 'Content-Type' => response['content-type'],\n 'Object-ID' => response['Object-ID'],\n 'Content-ID' => response['Content-ID']\n }\n\n if response['Transfer-Encoding'].to_s.downcase == \"chunked\" || response['Content-Length'].to_i > 100 then\n data_object = RETS::DataObject.new(info, response.body)\n if block_given?\n yield data_object\n results += 1\n else\n results << data_object\n end\n end\n end\n\n results\n end",
"def unpack\nraw_bytes.unpack \"NnnCCa6\"\nend",
"def header(h, buffer = Buffer.new)\n rep = HEADREP[h[:type]]\n\n if h[:type] == :indexed\n buffer << integer(h[:name], rep[:prefix])\n\n else\n if h[:name].is_a? Integer\n buffer << integer(h[:name]+1, rep[:prefix])\n else\n buffer << integer(0, rep[:prefix])\n buffer << string(h[:name])\n end\n\n if h[:type] == :substitution\n buffer << integer(h[:index], 0)\n end\n\n if h[:value].is_a? Integer\n buffer << integer(h[:value], 0)\n else\n buffer << string(h[:value])\n end\n end\n\n # set header representation pattern on first byte\n fb = buffer[0].unpack(\"C\").first | rep[:pattern]\n buffer.setbyte(0, fb)\n\n buffer\n end",
"def unpack_one! format\n ary = @b.unpack \"#{format}a*\"\n @b = ary.pop\n ary[0]\n end",
"def read_from_blobs_file(entry)\n begin\n raw = File.read(@blobs_file_name, entry[BYTES], entry[START])\n rescue => e\n PEROBS.log.fatal \"Cannot read blobs file #{@blobs_file_name}: \" +\n e.message\n end\n if Zlib.crc32(raw, 0) != entry[CRC]\n PEROBS.log.fatal \"BTreeBlob for object #{entry[ID]} has been \" +\n \"corrupted: Checksum mismatch\"\n end\n\n raw\n end",
"def read_binary(file); end",
"def file_remote_digestsha1(file_name)\n data = read_file(file_name)\n chksum = nil\n if data\n chksum = Digest::SHA1.hexdigest(data)\n end\n return chksum\n end",
"def parse(buf)\n n, count = 0, 0\n while n < buf.size\n key_size, value_size = buf[n, 8].unpack('NN')\n data_size = key_size + 8\n data_size += value_size if value_size != DELETE\n data = buf[n, data_size]\n n += data_size\n unless buf[n, 4] == crc32(data)\n raise 'CRC mismatch: your stash might be corrupted!'\n end\n n += 4\n yield(value_size == DELETE ? [data[8, key_size]] : [data[8, key_size], data[8 + key_size, value_size]])\n count += 1\n end\n count\n end",
"def decode_objects(objects)\n objects.collect do |o|\n if(o[:data])\n o = inflate_object(o) if(is_object_deflated?(o))\n o[:data] = postscript_to_text(o[:data])\n end\n end\n\n objects.compact\n end",
"def parse(data, compact)\n magic = data[0..3]\n raise InvalidHeader, \"No magic bytes found at start\" unless %w( SDPX XPDS).include?(magic)\n \n is_le = (magic == \"XPDS\")\n \n version_check = FileInfo.only(:magic, :version)\n begin\n result = is_le ? version_check.apply_le!(data) : version_check.apply!(data)\n raise InvalidHeader, \"Unknown version tag #{result.version}\" unless result.version =~ /V(\\d)\\.(\\d+)/i\n rescue ArgumentError\n raise InvalidHeader, \"Cannot unpack header\"\n end\n \n struct = compact ? CompactDPX : DPX\n is_le ? struct.apply_le!(data) : struct.apply!(data)\n end",
"def extract_pack\n io = Zlib::GzipReader.new(DataDragon.data_pack_path.open)\n\n Gem::Package::TarReader.new(io) do |tar|\n tar.each do |tarfile|\n destination_file = (DataDragon.data_unpacked_path + tarfile.full_name)\n\n if tarfile.directory?\n destination_file.mkpath\n else\n destination_directory = destination_file.dirname\n destination_directory.mkpath unless destination_directory.directory?\n destination_file.write(tarfile.read)\n end\n end\n end\n end",
"def marshal_load array # :nodoc:\n initialize array[1]\n\n @parser = array[2]\n @comment = array[3]\n\n @file_stat = nil\n end",
"def readpartial(length = nil)\n raise StateError, \"already closed\" if @closed\n\n if @inflater.total_out == @length\n # Leftover data\n @reader.finish_object(@inflater.finish)\n @closed = true\n return\n end\n\n chunk = @reader.readpartial(length)\n data = @inflater.inflate(chunk)\n @sha1.update(data)\n\n data\n end",
"def download_remote_sha1\n @log.info('Downloading Elasticsearch SHA1.')\n\n @remote_sha1 = ''\n open(@download.verify_url) do |file|\n @remote_sha1 = file.read\n end\n\n @remote_sha1 = @remote_sha1.split(/\\s\\s/)[0]\n\n @remote_sha1\n end",
"def parse_body(buffer)\n super(buffer)\n @id = shift_short(buffer)\n unless buffer.empty?\n raise ExtraBytesError, \"at end of Publish Release packet\"\n end\n end",
"def marshal_load(arg0)\n end",
"def unpack_body(body)\n # fixed fields\n self.action = body.slice!(0)\n self.priv_lvl = body.slice!(0)\n self.authen_type = body.slice!(0)\n self.service = body.slice!(0)\n self.user_len = body.slice!(0)\n self.port_len = body.slice!(0)\n self.rem_addr_len = body.slice!(0)\n self.data_len = body.slice!(0)\n \n # variable fields\n self.user = body.slice!(0..(@user_len - 1)) if (@user_len != 0)\n self.port = body.slice!(0..(@port_len - 1)) if (@port_len != 0)\n self.rem_addr = body.slice!(0..(@rem_addr_len - 1)) if (@rem_addr_len != 0)\n self.data = body.slice!(0..(@data_len - 1)) if (@data_len != 0) \n return(nil)\n end",
"def fix_dex_header(dexfile)\n dexfile = dexfile.unpack('a8LH40a*')\n dexfile[2] = Digest::SHA1.hexdigest(dexfile[3])\n dexfile[1] = Zlib.adler32(dexfile[2..-1].pack('H40a*'))\n dexfile.pack('a8LH40a*')\n end",
"def head(bytes)\n read(bytes)\n end",
"def header(h, buffer = \"\")\n rep = HEADREP[h[:type]]\n\n if h[:type] == :indexed\n buffer << integer(h[:name], rep[:prefix])\n\n else\n if h[:name].is_a? Integer\n buffer << integer(h[:name]+1, rep[:prefix])\n else\n buffer << integer(0, rep[:prefix])\n buffer << string(h[:name])\n end\n\n if h[:type] == :substitution\n buffer << integer(h[:index], 0)\n end\n\n if h[:value].is_a? Integer\n buffer << integer(h[:value], 0)\n else\n buffer << string(h[:value])\n end\n end\n\n # set header representation pattern on first byte\n fb = buffer[0].unpack(\"C\").first | rep[:pattern]\n buffer.setbyte(0, fb)\n\n buffer\n end",
"def unpack_body(body)\n # fixed-length fields\n self.user_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1)\n self.flags = body.slice!(0)\n \n # variable-length fields\n @user_msg = body.slice!(0..(@user_msg_len - 1)) if (@user_msg_len != 0)\n @data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n\n return(nil)\n end",
"def populate\n response = self.container.connection.cfreq(\"HEAD\",@storagehost,@storagepath)\n raise NoSuchObjectException, \"Object #{@name} does not exist\" if (response.code != \"204\")\n @bytes = response[\"content-length\"]\n @last_modified = Time.parse(response[\"last-modified\"])\n @etag = response[\"etag\"]\n @content_type = response[\"content-type\"]\n resphash = {}\n response.to_hash.select { |k,v| k.match(/^x-object-meta/) }.each { |x| resphash[x[0]] = x[1][0].to_s }\n @metadata = resphash\n true\n end",
"def unpack_body(body)\n # fixed-length fields\n self.server_msg_len = body.slice!(0..1)\n self.data_len = body.slice!(0..1)\n self.status = body.slice!(0)\n \n # variable-length fields\n @server_msg = body.slice!(0..(@server_msg_len - 1)) if (@server_msg_len != 0)\n @data = body.slice!(0..(@data_len - 1)) if (@data_len != 0)\n\n return(nil)\n end",
"def import\n fragments.each(&:destroy)\n File.open(bin_file_name, \"rb\") do |f|\n order = 0\n new_lines = 0\n until f.eof?\n new_lines += import_next_object(f, order)\n order += 1\n end\n update! lines: new_lines\n end\n add_locations\n end",
"def read_object(id)\n return nil unless (index_entry = find(id))\n read_from_blobs_file(index_entry)\n end",
"def put_object(type, content)\n data = \"#{type} #{content.length}\\0#{content}\"\n id = sha(data)\n path = object_path(id)\n\n unless File.exists?(path)\n FileUtils.mkpath(File.dirname(path))\n open(path, 'wb') do |f|\n f.write Zlib::Deflate.deflate(data)\n end\n end\n\n id\n end",
"def read(filename)\n f = File.open(filename, 'r')\n f.seek(-ID3::ID3v1tagSize, IO::SEEK_END)\n hastag = (f.read(3) == 'TAG')\n if hastag\n f.seek(-ID3::ID3v1tagSize, IO::SEEK_END)\n @raw = f.read(ID3::ID3v1tagSize)\n\n # self.parse!(raw) # we should use \"parse!\" instead of duplicating code!\n\n if (raw.getbyte(ID3v1versionbyte) == 0) \n @version = \"1.0\"\n else\n @version = \"1.1\"\n end\n else\n @raw = @version = nil\n end\n f.close\n #\n # now parse all the fields\n\n ID3::SUPPORTED_SYMBOLS[@version].each{ |key,val|\n if val.class == Range\n # self[key] = @raw[val].squeeze(\" \\000\").chomp(\" \").chomp(\"\\000\")\n self[key] = @raw[val].strip\n elsif val.class == Fixnum\n self[key] = @raw.getbyte(val).to_s\n else \n # this can't happen the way we defined the hash..\n # printf \"unknown key/val : #{key} / #{val} ; val-type: %s\\n\", val.type\n end\n }\n hastag\n end",
"def read_header(fp)\n tags = Support::HexString.from_bin(fp.read(1)).ord\n type = (tags & 0x70) >> 4\n size = tags & 0xF\n shift = 4\n while tags & 0x80 > 0\n tags = Support::HexString.from_bin(fp.read(1)).ord\n size += (tags & 0x7F) << shift\n shift += 7\n end\n [type, size]\n end",
"def parse_chunk_ACCT stream\n parse_itemized_chunk stream, [\n {:name => :id},\n {:name => :name, :encoding => :aes256},\n {:name => :group, :encoding => :aes256},\n {:name => :url, :encoding => :hex},\n {:name => :extra},\n {:name => :favorite},\n {:name => :shared_from_id},\n {:name => :username, :encoding => :aes256},\n {:name => :password, :encoding => :aes256},\n {:name => :password_protected},\n {:name => :generated_password},\n {:name => :sn}, # ?\n {:name => :last_touched},\n {:name => :auto_login},\n {:name => :never_autofill},\n {:name => :realm_data},\n {:name => :fiid}, # ?\n {:name => :custom_js},\n {:name => :submit_id},\n {:name => :captcha_id},\n {:name => :urid}, # ?\n {:name => :basic_authorization},\n {:name => :method},\n {:name => :action, :encoding => :hex},\n {:name => :group_id},\n {:name => :deleted},\n {:name => :attach_key},\n {:name => :attach_present},\n {:name => :individual_share},\n {:name => :unknown1}\n ]\n end",
"def inflate(data); end",
"def read_object\n # Incrementally parse next object.\n begin\n while not @object\n @sock.readpartial(MAX_READ_CHUNK, @inbuf)\n @parser << @inbuf\n PennJSON_Conversation.do_clear(@inbuf)\n end\n rescue Yajl::ParseError\n PennJSON.panic 'Malformed protocol message.'\n rescue EOFError\n raise SystemExit, 'Remote closed connection.'\n end\n\n # Fetch parsed object. Could make this a queue, I suppose.\n result = @object\n @object = nil\n return result\n end",
"def header_fields\n IO.binread(@path, HEADER_SIZE, 0).unpack(\"N5C1\")\n end",
"def parse_zip64_extra(for_local_header); end",
"def initialize(fileobj, offset=nil)\n size = begin \n File.size fileobj.path\n rescue ERRNO::ENOENT, IOError\n fileobj.seek(0, IO::SEEK_END)\n fileobj.tell\n end\n\n # If we don't get an offset, try to skip an ID3v2 tag.\n if offset.nil?\n fileobj.rewind\n idata = fileobj.read 10\n if (val = idata.unpack('a3xxxa4')).include? nil\n id3, insize = '', 0\n else\n id3, insize = val\n end\n\n insize = Mutagen::ID3::BitPaddedInteger.new insize\n\n if id3 == 'ID3' and insize > 0\n offset = insize + 10\n else\n offset = 0\n end\n end\n\n # Try to find two valid headers (meaning, very likely, MPEG data)\n # at the given offset, 30% through the file, 60% through the file,\n # and 90% through the file.\n flag = false\n [offset, 0.3*size, 0.6*size, 0.9*size].each do |i|\n begin \n try(fileobj, Integer(i), size - offset)\n rescue Mutagen::MP3::ERROR\n pass\n else \n flag = true\n break\n end\n end\n # If we can't find any two consecutive frames, try to find just\n # one frame back at the original offset given.\n unless flag\n try(fileobj, offset, size - offset, false)\n @sketchy = true\n end\n end",
"def header(buf)\n peek = buf.readbyte(0)\n\n header = {}\n header[:type], type = HEADREP.select do |t, desc|\n mask = (peek >> desc[:prefix]) << desc[:prefix]\n mask == desc[:pattern]\n end.first\n\n header[:name] = integer(buf, type[:prefix])\n if header[:type] != :indexed\n header[:name] -= 1\n\n if header[:name] == -1\n header[:name] = string(buf)\n end\n\n if header[:type] == :substitution\n header[:index] = integer(buf, 0)\n end\n\n header[:value] = string(buf)\n end\n\n header\n end",
"def sha1_hexdigest\n @hexdigest ||= begin\n raise StateError, \"not finished reading object\" unless @closed\n @sha1.hexdigest\n end\n end",
"def sha1(name)\n Digest::SHA1.file(path(name)).hexdigest\n end",
"def initialize(filename)\n\t\tbegin\n @sourcefile = filename\n \n\t\t\tFile.open(filename, \"r\") do |objfile|\n\t\t\t\tunless getl(objfile) == \"LINK\"\n\t\t\t\t\tputs \"Invalid file format: \" + filename\n\t\t\t\t\treturn nil\n\t\t\t\tend\n\n\t\t\t\t# Read header info\n\t\t\t\t@nsegs, @nsyms, @nrlocs = getl(objfile).split(' ').collect {|x| x.to_i}\n\t\t\t\t\n\t\t\t\t# Parse segs\n\t\t\t\t@segrecs = [], @segnames = {}\n\t\t\t\tgather_segs(objfile)\n\t\t\t\t\n\t\t\t\t# Parse symbols\n\t\t\t\t@symrecs = [], @symnames = {}\n\t\t\t\tgather_syms(objfile)\n\t\t\t\t\n\t\t\t\t# Parse relocations\n\t\t\t\t@rlocrecs = []\n\t\t\t\tgather_rlocs(objfile)\n\t\t\t\t\n\t\t\t\t# Slurp in data\n\t\t\t\t@segrecs.select {|seg| /P/===seg[:type]}.each do |seg|\n\t\t\t\t\tseg[:data] = getl(objfile).hex2bin\n\t\t\t\tend\n\n\t\t\tend\n\t\trescue\n\t\t\tputs \"Could not open object file: \" + filename\n\t\tend\n\tend",
"def initialize(name, opener)\n @name = name\n @opener = opener\n opener.open(name, \"r\") do |fp|\n # Check signature\n unless fp.read(4) == \"PACK\"\n raise ArgumentError.new(\"#{name} is not a packfile.\")\n end\n @version = fp.read(4).unpack(\"N\").first\n @size = fp.read(4).unpack(\"N\").first\n cur = fp.tell\n fp.seek(0, IO::SEEK_END)\n @end_of_data = fp.tell - 20\n end\n possible_index_path = name[0..(name.size - File.extname(name).size - 1)] + \".idx\"\n if File.exist? possible_index_path\n # use a persistent file pointer\n fp = File.open(possible_index_path, \"r\")\n @index = PackFileIndex.parse(fp)\n end\n @offset_cache = {}\n end",
"def marshal_load(array)\n self.version = array[0]\n end",
"def check_sha1(filename, sha1_hash)\n actual_hash = Digest::SHA1.hexdigest(::IO.binread(filename))\n return actual_hash == sha1_hash\n end",
"def demarshal\n Marshal.load(decompress)\n end",
"def create_sha1 str, namespace\nsha1 = Digest::SHA1.new\nsha1.update namespace.raw_bytes\nsha1.update str\nsum = sha1.digest\nraw = mask 5, sum[0..15]\nret = new raw\nret.freeze\nret\nend",
"def initialize(apk_file)\n data = nil\n Zip.warn_invalid_date = false\n\n # Get resources.arsc from the APK file\n Zip::File.foreach(apk_file) do |f|\n if f.name.match(/resources.arsc/)\n data = f.get_input_stream.read.force_encoding('BINARY')\n end\n end\n\n # Parse the Table Chunk\n ## Header\n header_type = read_short(data, HEADER_START)\n header_size = read_short(data, HEADER_START+2)\n header_chunk_size = read_word(data, HEADER_START+4)\n header_package_count = read_word(data, HEADER_START+8)\n puts \"Resource Package Count = #{header_package_count}\" if DEBUG\n\n # Parse the StringPool Chunk\n ## Header\n startoffset_pool = HEADER_START + header_size\n puts \"Parse Main StringPool Chunk\" if DEBUG\n @stringpool_main = parse_stringpool(data, startoffset_pool)\n puts \"#{@stringpool_main.values.length} strings found\" if DEBUG\n\n # Parse the Package Chunk\n ## Header\n startoffset_package = startoffset_pool + @stringpool_main.header.chunk_size\n @packages = Hash.new()\n i = 0\n while i < header_package_count\n package_element = parse_package(data, startoffset_package)\n puts \"Package #{package_element.package_header.id}\" if DEBUG\n startoffset_package = startoffset_package + package_element.package_header.header.chunk_size\n @packages[package_element.package_header.id] = package_element\n\n i += 1\n end\n\n end",
"def decode_file(encoders, filename)\n code = File.read(filename).gsub(/^.*require\\s*['\"]whiteout['\"]/m, '')\n encoders.each do | mod | \n if mod::HEADER == code[0,mod::HEADER.length]\n\treturn mod.decode(code[mod::HEADER.length..-1])\n end\n if mod::HEADER_COMPRESSED == code[0,mod::HEADER_COMPRESSED.length]\n\treturn mod.decode_compressed(code[mod::HEADER_COMPRESSED.length..-1])\n end\n end\n throw \"No matching decompression module found\"\n end",
"def header(h, buffer = Buffer.new)\n rep = HEADREP[h[:type]]\n\n case h[:type]\n when :indexed\n buffer << integer(h[:name] + 1, rep[:prefix])\n when :changetablesize\n buffer << integer(h[:value], rep[:prefix])\n else\n if h[:name].is_a? Integer\n buffer << integer(h[:name] + 1, rep[:prefix])\n else\n buffer << integer(0, rep[:prefix])\n buffer << string(h[:name])\n end\n\n buffer << string(h[:value])\n end\n\n # set header representation pattern on first byte\n fb = buffer.ord | rep[:pattern]\n buffer.setbyte(0, fb)\n\n buffer\n end",
"def load_dataE(filename)\r\n Log.ger.debug('Read '+filename)\r\n File.open(filename, \"rb\") { |f|\r\n obj = Marshal.load(f)\r\n }\r\nend",
"def recover_object\n # Up to the tab is the amount of bytes to read\n demarshal_bytes = @io.gets(\"\\t\").to_i\n \n # When at end of IO return nil\n raise NothingToRecover if demarshal_bytes.zero?\n \n blob = @io.read(demarshal_bytes)\n demarshal_object(blob)\n end",
"def partially_decompress(compressed)\n # Keep regex bracketed because of Lint/AmbiguousRegexpLiteral\n if (parsed = compressed.match(/(.*?)\\((\\d+)x(\\d+)\\)(.*)/))\n safe, length, times, rest = parsed.captures\n length = length.to_i\n times = times.to_i\n\n left, right = rest.split_at length\n safe + left * times + partially_decompress(right)\n else\n compressed\n end\n end",
"def ingest_object(row)\n\n @touch = File.join(\"/tmp\", row[\"exportedAs\"])\n \n unless File.exists?(@touch)\n obj = File.join(@directory, File.basename(row[\"exportedAs\"].gsub('\\\\', '/')))\n sourceFile = File.join(obj,File.basename(row[\"exportedAs\"].gsub('\\\\', '/')))\n \n if File.exists?(obj)\n # Gets a new PID\n pid = Nokogiri::XML(open(@fedora_uri + \"/management/getNextPID?xml=true&namespace=#{@fedora_ns}\", {:http_basic_authentication=>[@fedora_user, @fedora_pass]})).xpath(\"//pid\").text\n \n #testing stuff\n #pid = \"druid:1\"\n \n fedora_obj = AimsDocument.new(:pid => pid)\n fedora_obj.label = File.basename(obj)\n fedora_obj.save\n print obj + \" ===> \"\n # now glob the object directory and makes datastreams for each of the files and add them as datastream to the fedora object.\n # fedora_obj.save\n \n dsid = 'rightsMetadata'\n xml_content = fedora_obj.datastreams_in_memory[dsid].content\n ds = Hydra::RightsMetadata.from_xml(xml_content)\n pid = fedora_obj.pid\n ds.pid = pid\n ds.dsid = dsid\n fedora_obj.datastreams_in_memory[dsid] = ds\n permissions = {\"group\"=>{\"public\"=>\"read\", \"archivist\" => \"edit\", \"researcher\" => \"read\", \"patron\" => 'read', \"donor\" => 'edit' }, \"person\" => {\"archivist1\" => \"edit\"}}\n ds.update_permissions(permissions)\n permissions = {\"group\" => {\"public\"=>\"read\"}}\n ds.update_permissions(permissions)\n \n fedora_obj.save\n \n Dir[\"#{obj}/**/**\"].each do |f|\n \n #damn OS X spotlight. \n unless f.include?('DS_Store')\n \n # text files and jp2000s get added as datastreams in the object. the wordperfect files get added as their own objects\n if f =~ /(.*)\\.(txt)/\n fedora_obj.add_datastream(create_file_ds(f, File.basename(f), File.basename(f)))\n \n elsif f =~ /(.*)\\.(pdf)/\n fedora_obj.add_datastream(create_file_ds(f, 'pdf', \"#{File.basename(f)}.pdf\"))\n elsif f =~ /(.*)\\.(jp2)/\n # Below is if you want to not have the jp2 imported into fedora. it will just move them to a directory.\n #jp2_dir = File.join('/tmp', fedora_obj.pid.gsub(\"druid:\", \"druid_\"))\n #FileUtils.mkdir_p(jp2_dir) unless File.directory?(jp2_dir)\n #FileUtils.cp(f, jp2_dir, :verbose => true)\n # Below this adds the jp2000s into fedora.\n fedora_obj.add_datastream(create_file_ds(f, File.basename(f), File.basename(f)))\n\t\t elsif f == sourceFile #source file gets its own fedora object. \n cpid = Nokogiri::XML(open(@fedora_uri + \"/management/getNextPID?xml=true&namespace=#{@fedora_ns}\", {:http_basic_authentication=>[@fedora_user, @fedora_pass]})).xpath(\"//pid\").text\n \n child_obj = FileAsset.new(:pid => cpid)\n child_obj.label = File.basename(f)\n dc = child_obj.datastreams['descMetadata']\n dc.extent_values << File.size(f)\n \n \n fedora_obj.add_relationship(:has_part, child_obj )\n fedora_obj.add_relationship(:has_collection_member, child_obj)\n puts \"processing:#{f} for objectID #{cpid}\"\n ext = File.extname(f)\n id = \"DS1\"\n label = File.basename(f)\n child_obj.add_datastream(create_file_ds(f, id, label ))\n child_obj.save\n print f + \"\\n\"\n else\n puts \"not a file to ingest ==> #{f}\"\n end #if\n end #unless\n end #dir\n \n dm = fedora_obj.datastreams[\"descMetadata\"]\n prop = fedora_obj.datastreams[\"properties\"]\n \n labels = row[\"labels\"].split(',')\n \n loutput = {\"subjects\" => [], \"access\" => []}\n doc_values = { \"D\" => \"Document\", \"S\" => \"Spreadsheet\", \"E\" => \"Email\", \"IM\" => \"Image\", \"V\" => \"Video\", \"SO\" => \"Sound\"} \n comp_values = {\"CM:5.25\" => \"5.25 inch. 
floppy diskettes\", \"CM:3.5\" => \"3.5 inch. floppy diskettes\", \"CM:P\" => \"Punch cards\", \"CM:T\" => \"Tape\" }\n access_values = {\"O\" => \"owner\", \"A\" => \"Archivists\", \"I\" => \"Invited\", \"P\" =>\"Public\", \"M\"=>\"Reading\"}\n \n \n labels.each do |l|\n if doc_values.has_key?(l)\n loutput[\"doctype\"] = doc_values[l]\n elsif comp_values.has_key?(l)\n loutput[\"mediatype\"] = comp_values[l]\n elsif access_values.has_key?(l)\n loutput[\"access\"] << access_values[l]\n elsif l.include?(\"S:\")\n loutput[\"subjects\"] << l.gsub(\"S:\", '') \n end #if\n end #do\n \n pp(loutput)\n prop.collection_values << \"Steven J. Gould\"\n prop.pages_values << number_of_pages(fedora_obj)\n prop.path_values << row['path']\n prop.file_size_values << row['size']\n prop.md5_values << row['md5']\n prop.sha1_values << row['sha1']\n prop.file_type_values << row['type']\n prop.filename_values << File.basename(obj)\n \n dm.isPartOf_values = row[\"subseries\"].gsub(/[0-9]|Bookmark:|\\./,\"\").strip\n dm.source_values << row['filename']\n dm.type_values << loutput['doctype']\n dm.format_values << loutput[\"mediatype\"]\n \n \n \n loutput['subjects'].each { |s| dm.subject_values << s.gsub(\"S:\", \"\") }\n \n dm.save\n prop.save\n fedora_obj.save\n\n solr_doc = fedora_obj.to_solr\n solr_doc << Solr::Field.new( :discover_access_group_t => \"public\" )\n ActiveFedora::SolrService.instance.conn.update(solr_doc)\n FileUtils.mkdir_p(@touch)\n end #unless\n end #if exists? \n rescue Exception => e \n puts e.backtrace\n\tputs \"erroring....\"\n sleep(300)\n return nil\n rescue Timeout::Error => e\n puts \"timeout error ....\" \n sleep(350)\n\n \n end",
"def create_file_header_and_data(path, is_payload, is_custom_payload)\r\n #print_status(\"Length of #{path}: #{path.length}\")\r\n if is_payload and is_custom_payload\r\n file_data = File.binread(path.from(72))\r\n elsif is_payload and !is_custom_payload\r\n file_data = generate_payload_exe\r\n else\r\n file_data = File.binread(File.basename(path))\r\n end\r\n\r\n file_data_crc32 = crc32(file_data).to_i\r\n\r\n # HEAD_CRC: Lower 2 bytes of CRC32 of the next bytes of header after HEAD_TYPE.\r\n # The bogus value for HEAD_CRC will be replaced later.\r\n file_header = \"\"\r\n file_header << \"AA\"\r\n # HEAD_SIZE: file header size.\r\n if is_payload\r\n file_header << [31 + path.length].pack(\"v\")\r\n else\r\n file_header << [31 + ::File.basename(path).length].pack(\"v\")\r\n end\r\n # HEAD_TYPE: header type is 1.\r\n file_header << \"\\x01\"\r\n # HEAD_FLAGS: header flags. \\x01\\x80 is ADDSIZE|SOLID.\r\n file_header << \"\\x01\\x80\"\r\n # PACK_SIZE: size when packed.\r\n file_header << [file_data.length].pack(\"V\")\r\n #print_status(\"#{file_data.length}\")\r\n # ORIG_SIZE: original size. Same as PACK_SIZE since no compression is *truly* taking place.\r\n file_header << [file_data.length].pack(\"V\")\r\n # FTIME: file date and time in MS-DOS format\r\n file_header << \"\\x63\\xB0\\x55\\x4E\"\r\n # ATTR: DOS/Windows file attribute bit field, as int, as produced by the Windows GetFileAttributes() API.\r\n file_header << \"\\x20\\x00\\x00\\x00\"\r\n # CRC32: CRC32 of the compressed file\r\n file_header << [file_data_crc32].pack(\"V\")\r\n # Compression type\r\n file_header << \"\\x00\"\r\n # Compression quality\r\n file_header << \"\\x03\"\r\n # Parameter for decompression\r\n file_header << \"\\x0A\\x00\"\r\n # RESERVED1\r\n file_header << \"\\x54\\x45\"\r\n # FNAME_SIZE: size of filename string\r\n if is_payload\r\n file_header << [path.length].pack(\"v\")\r\n else\r\n # print_status(\"#{::File.basename(path).length}\")\r\n file_header << [::File.basename(path).length].pack(\"v\")\r\n end\r\n #file_header << [path.length].pack(\"v\")\r\n # FNAME: filename string. Empty for now. Fill in later.\r\n if is_payload\r\n file_header << path\r\n else\r\n file_header << ::File.basename(path)\r\n end\r\n\r\n #print_status(\"Calculating other_file_header...\")\r\n file_header_crc32 = crc32(file_header[4, file_header.length]).to_s(16)\r\n file_header_crc16 = file_header_crc32.last(4).to_i(base=16)\r\n file_header[0,2] = [file_header_crc16].pack(\"v\")\r\n file_header << file_data\r\n end",
"def initial_parse(binstr)\n return false unless binstr\n\n if binstr[0, 2] != self.class.const_get(:HEADER_ID)\n warn 'WARNING: weird extra field header ID. Skip parsing it.'\n return false\n end\n\n [binstr[2, 2].unpack1('v'), binstr[4..-1]]\n end",
"def sha1?; @sha1; end",
"def get_chunk(object)\n chunk_headers = get_block_data(object, /<</, />>/, 2)\n\n return { :header => chunk_headers[0], :data => get_stream_data(object) } unless(chunk_headers.empty?)\n end",
"def parse_multifile(filename: \"\", type: \"old\")\r\n !filename.empty? ? file = File.binread(filename) : return\r\n case type\r\n when \"old\"\r\n file.split(\"\\n\").map(&:strip).reject(&:empty?).map{ |m|\r\n title = m.split('#')[0][1..-1] rescue \"\"\r\n author = \"Metanet Software\"\r\n map = parse_map(data: m.split(\"#\")[1], type: \"old\") rescue {tiles: [], objects: []}\r\n {title: title, author: author, tiles: map[:tiles], objects: map[:objects]}\r\n }\r\n else\r\n print(\"ERROR: Incorrect type (old).\")\r\n return 0\r\n end\r\nend",
"def compute_parts(objects, multipart_upload)\n objects.map.with_index do |object, idx|\n {\n bucket: multipart_upload.bucket_name,\n key: multipart_upload.object_key,\n upload_id: multipart_upload.id,\n copy_source: [object.bucket_name, object.key].join(\"/\"),\n part_number: idx + 1,\n }\n end\n end",
"def unmarshall_buffer(buf)\n buf = buf.dup\n if buf[0] == ?l\n endianness = LIL_END\n else\n endianness = BIG_END\n end\n pu = PacketUnmarshaller.new(buf, endianness)\n dummy, @message_type, @flags, @protocol, @body_length, @serial,\n headers = pu.unmarshall(MESSAGE_SIGNATURE)\n headers.each do |struct|\n case struct[0]\n when PATH\n @path = struct[1]\n when INTERFACE\n @interface = struct[1]\n when MEMBER\n @member = struct[1]\n when ERROR_NAME\n @error_name = struct[1]\n when REPLY_SERIAL\n @reply_serial = struct[1]\n when DESTINATION\n @destination = struct[1]\n when SENDER\n @sender = struct[1]\n when SIGNATURE\n @signature = struct[1]\n end\n end\n pu.align(8)\n if @body_length > 0 and @signature\n @params = pu.unmarshall(@signature, @body_length)\n end\n [self, pu.idx]\n end",
"def sha1_fingerprint\n @sha1_fingerprint ||= @node['sha1Fingerprint']\n end",
"def smart_convert!\n if @file_headers.any?\n @data = @data.map { |d_arr| @file_headers.each_with_object({}).with_index { |(h_name, h_hash), ind| h_hash[h_name] = d_arr[ind] } }\n @smart_convert = true\n end\n end",
"def decompress\n header = buf.u32\n return :invalid_data if (header & 0xFF) != 0x11\n decompressedSize = header >> 8\n decompressedSize = buf.u32 if decompressedSize == 0\n\n bufferLength = 0x1000\n buffer = Array.new(bufferLength)\n bufferOffset = 0\n\n flags = 0\n mask = 1\n\n outbuf = []\n until outbuf.size >= decompressedSize\n if mask == 1\n flags = buf.u8\n return :stream_too_short if flags.nil?\n mask = 0x80\n else\n mask >>= 1\n end\n\n if (flags & mask) > 0\n byte1 = buf.u8\n return :stream_too_short if byte1.nil?\n\n length = byte1 >> 4\n disp = -1\n case length\n when 0\n byte2 = buf.u8\n byte3 = buf.u8\n return :stream_too_short if byte3.nil?\n length = (((byte1 & 0x0F) << 4) | (byte2 >> 4)) + 0x11\n disp = (((byte2 & 0x0F) << 8) | byte3) + 0x1\n when 1\n byte2 = buf.u8\n byte3 = buf.u8\n byte4 = buf.u8\n return :stream_too_short if byte4.nil?\n length = (((byte1 & 0x0F) << 12) | (byte2 << 4) | (byte3 >> 4)) + 0x111\n disp = (((byte3 & 0x0F) << 8) | byte4) + 0x1\n else\n byte2 = buf.u8\n return :stream_too_short if byte2.nil?\n length = ((byte1 & 0xF0) >> 4) + 0x1\n disp = (((byte1 & 0x0F) << 8) | byte2) + 0x1\n end\n\n return :invalid_data if disp > outbuf.size\n\n bufIdx = bufferOffset + bufferLength - disp\n length.times do\n next_byte = buffer[bufIdx % bufferLength]\n bufIdx += 1\n outbuf << next_byte\n buffer[bufferOffset] = next_byte\n bufferOffset = (bufferOffset + 1) % bufferLength\n end\n else\n next_byte = buf.u8\n return :stream_too_short if next_byte.nil?\n outbuf << next_byte\n buffer[bufferOffset] = next_byte\n bufferOffset = (bufferOffset + 1) % bufferLength\n end\n end\n\n outbuf\n end",
"def marshal_load(serialised); end",
"def marshal_load(array)\n initialize array[0]\n end",
"def parse_multifile(filename: \"\", type: \"old\")\n !filename.empty? ? file = File.binread(filename) : return\n case type\n when \"old\"\n file.split(\"\\n\").map(&:strip).reject(&:empty?).map{ |m|\n title = m.split('#')[0][1..-1] rescue \"\"\n author = \"Metanet Software\"\n map = parse_map(data: m.split(\"#\")[1], type: \"old\") rescue {tiles: [], objects: []}\n {title: title, author: author, tiles: map[:tiles], objects: map[:objects]}\n }\n else\n print(\"ERROR: Incorrect type (old).\")\n return 0\n end\nend",
"def core_sha1(x, len)\n # append padding\n x[len >> 5] ||= 0\n x[len >> 5] |= 0x80 << (24 - len % 32)\n x[((len + 64 >> 9) << 4) + 15] = len\n\n w = Array.new(80, 0)\n a = 1_732_584_193\n b = -271_733_879\n c = -1_732_584_194\n d = 271_733_878\n e = -1_009_589_776\n\n # for(var i = 0; i < x.length; i += 16)\n i = 0\n while i < x.length\n olda = a\n oldb = b\n oldc = c\n oldd = d\n olde = e\n\n # for(var j = 0; j < 80; j++)\n j = 0\n while j < 80\n if j < 16\n w[j] = x[i + j] || 0\n else\n w[j] = rol(w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16], 1)\n end\n\n t = safe_add(safe_add(rol(a, 5), sha1_ft(j, b, c, d)),\n safe_add(safe_add(e, w[j]), sha1_kt(j)))\n e = d\n d = c\n c = rol(b, 30)\n b = a\n a = t\n j += 1\n end\n\n a = safe_add(a, olda)\n b = safe_add(b, oldb)\n c = safe_add(c, oldc)\n d = safe_add(d, oldd)\n e = safe_add(e, olde)\n i += 16\n end\n [a, b, c, d, e]\n end",
"def update!(**args)\n @algorithm = args[:algorithm] if args.key?(:algorithm)\n @bigstore_object_ref = args[:bigstore_object_ref] if args.key?(:bigstore_object_ref)\n @blob_ref = args[:blob_ref] if args.key?(:blob_ref)\n @blobstore2_info = args[:blobstore2_info] if args.key?(:blobstore2_info)\n @composite_media = args[:composite_media] if args.key?(:composite_media)\n @content_type = args[:content_type] if args.key?(:content_type)\n @content_type_info = args[:content_type_info] if args.key?(:content_type_info)\n @cosmo_binary_reference = args[:cosmo_binary_reference] if args.key?(:cosmo_binary_reference)\n @crc32c_hash = args[:crc32c_hash] if args.key?(:crc32c_hash)\n @diff_checksums_response = args[:diff_checksums_response] if args.key?(:diff_checksums_response)\n @diff_download_response = args[:diff_download_response] if args.key?(:diff_download_response)\n @diff_upload_request = args[:diff_upload_request] if args.key?(:diff_upload_request)\n @diff_upload_response = args[:diff_upload_response] if args.key?(:diff_upload_response)\n @diff_version_response = args[:diff_version_response] if args.key?(:diff_version_response)\n @download_parameters = args[:download_parameters] if args.key?(:download_parameters)\n @filename = args[:filename] if args.key?(:filename)\n @hash_prop = args[:hash_prop] if args.key?(:hash_prop)\n @hash_verified = args[:hash_verified] if args.key?(:hash_verified)\n @inline = args[:inline] if args.key?(:inline)\n @is_potential_retry = args[:is_potential_retry] if args.key?(:is_potential_retry)\n @length = args[:length] if args.key?(:length)\n @md5_hash = args[:md5_hash] if args.key?(:md5_hash)\n @media_id = args[:media_id] if args.key?(:media_id)\n @object_id_prop = args[:object_id_prop] if args.key?(:object_id_prop)\n @path = args[:path] if args.key?(:path)\n @reference_type = args[:reference_type] if args.key?(:reference_type)\n @sha1_hash = args[:sha1_hash] if args.key?(:sha1_hash)\n @sha256_hash = args[:sha256_hash] if args.key?(:sha256_hash)\n @timestamp = args[:timestamp] if args.key?(:timestamp)\n @token = args[:token] if args.key?(:token)\n end"
] | [
"0.5682494",
"0.56126815",
"0.5607991",
"0.5605144",
"0.54417396",
"0.539379",
"0.5384476",
"0.53790593",
"0.53275424",
"0.52762884",
"0.5268248",
"0.5265314",
"0.5241195",
"0.52087337",
"0.5157022",
"0.5052081",
"0.5038033",
"0.5030748",
"0.5024052",
"0.49997893",
"0.49997893",
"0.49598372",
"0.49516672",
"0.49507946",
"0.4948029",
"0.49414468",
"0.4919305",
"0.49138194",
"0.4909138",
"0.49066204",
"0.49019277",
"0.48998278",
"0.48929408",
"0.48772705",
"0.48630947",
"0.482732",
"0.48260745",
"0.48099557",
"0.48002782",
"0.47981724",
"0.47955847",
"0.47953853",
"0.47893035",
"0.4785981",
"0.47726196",
"0.47685096",
"0.47600877",
"0.4757102",
"0.47362128",
"0.47332534",
"0.47280085",
"0.47187084",
"0.4718107",
"0.47160825",
"0.47144115",
"0.47094163",
"0.4688039",
"0.46842548",
"0.46824715",
"0.4681738",
"0.46814832",
"0.4678529",
"0.46743304",
"0.46671367",
"0.4664662",
"0.46595794",
"0.46532515",
"0.46530613",
"0.46522617",
"0.4651476",
"0.46461028",
"0.46420023",
"0.46319276",
"0.46296614",
"0.46276146",
"0.46195826",
"0.4619016",
"0.4618673",
"0.460238",
"0.46023193",
"0.46002838",
"0.45994225",
"0.45935062",
"0.45934755",
"0.45927307",
"0.45870212",
"0.45857766",
"0.45679256",
"0.4564868",
"0.45627022",
"0.45599544",
"0.4556474",
"0.45531005",
"0.45505387",
"0.4549523",
"0.45463714",
"0.4543337",
"0.45401606",
"0.4532922",
"0.45326805",
"0.45293275"
] | 0.0 | -1 |
sha1_file.c get_delta_base unpack-objects.c unpack_delta_entry | def parse_delta_offset
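    # Decodes the variable-length base offset that prefixes an OFS_DELTA
    # entry in a Git packfile (the same scheme as get_delta_base in
    # sha1_file.c and unpack_delta_entry in unpack-objects.c). `byte` is
    # assumed to be a helper that reads the next single byte from the pack
    # stream. Returns [base_offset, number_of_header_bytes_consumed].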
offset = -1
hdr_size = 0
begin
hdr = byte
hdr_size += 1
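      # The high bit of each header byte is a continuation flag; the low
      # seven bits carry the payload.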
continue = hdr & 0b10000000
low_offset = hdr & 0b01111111
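      # Starting from -1 and adding 1 before each shift reproduces Git's
      # bias of 2^7 + 2^14 + ... for multi-byte encodings, so offsets of
      # different lengths never overlap.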
offset = ((offset + 1) << 7) | low_offset
end while continue != 0
return [offset, hdr_size]
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def process_delta_entry(entry)\n path = entry[0]\n metadata = entry[1]\n photo = dropbox_event.dropbox_photos.find_by_path(path)\n if !metadata\n Rails.logger.debug(\"Destroying photo with path #{path}\")\n photo.destroy if photo.present?\n elsif /\\.((jpg)|(jpeg)|(png))$/ =~ path\n contents = client.get_file_and_metadata(path, metadata['rev'])\n tmp = Tempfile.new 'image'\n begin\n tmp.binmode\n tmp.write contents\n ensure\n tmp.close\n tmp.unlink\n end\n if photo\n if photo.rev != metadata['rev'] # need to update\n photo.photo = tmp\n photo.rev = metadata['rev']\n photo.save\n end\n else\n photo = DropboxPhoto.create(path: path,\n rev: metadata['rev'],\n dropbox_event: dropbox_event)\n photo.photo = tmp\n photo.save\n end\n end\n end",
"def patch(diffs)\n @hash = nil # invalidate any cached image\n\n Dir.chdir(root) do\n diffs.each do |diff|\n flag, key, v1, _ = diff\n # if key =~ /\\[/\n # keyname = key.match(/^(.*)\\[\\]$/).captures\n # elsif key =~ /\\./\n # keyname, subkey = key.match(/^(.*)\\.(.*)$/).captures\n # else\n # keyname = key\n # end\n\n dirname, filename, fieldname = Treet::Repo.filefor(key)\n filepath = \"#{dirname}/#{filename}\"\n\n case flag\n when '~'\n # change a value in place\n # load the current data & overwrite with the new value\n # idempotent: this will overwrite the file with the same contents\n if fieldname\n # hash entry\n data = File.exists?(filepath) ? JSON.load(File.open(filepath)) : {}\n data[fieldname] = v1\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n else\n # string entry\n File.open(filepath, \"w\") {|f| f << v1}\n end\n\n when '+'\n # add something\n if fieldname\n # writing a value into a hash\n # idempotent: this will overwrite the file with the same contents\n data = File.exists?(filepath) ? JSON.load(File.open(filepath)) : {}\n data[fieldname] = v1\n Dir.mkdir(dirname) unless Dir.exists?(dirname)\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n else\n # writing an entire hash into an array entry\n # idempotent: this will overwrite the file with the same contents\n subfile = \"#{dirname}/#{Treet::Hash.digestify(v1)}\"\n Dir.mkdir(dirname) unless Dir.exists?(dirname)\n case v1\n when Hash\n # hash entry\n File.open(subfile, \"w\") {|f| f << JSON.pretty_generate(v1)}\n else\n # string entry - create empty file with this name\n FileUtils.touch(subfile)\n end\n end\n\n when '-'\n # remove something\n if fieldname\n # this is a key in a subhash\n if File.exists?(filepath)\n # if the subhash is missing, there's nothing to remove, so do nothing (for idempotence)\n data = JSON.load(File.open(filepath))\n data.delete(fieldname)\n if data.empty?\n # all keys have been removed, clean up the file\n File.delete(filename)\n else\n File.open(filepath, \"w\") {|f| f << JSON.pretty_generate(data)}\n end\n end\n elsif dirname == \".\"\n # this is a top-level string\n File.delete(filename) if File.exists?(filename) # need the existence check for idempotence\n else\n # this is an array, we look for a match on the entire contents via digest\n subfile = \"#{dirname}/#{Treet::Hash.digestify(v1)}\"\n File.delete(subfile) if File.exists?(subfile) # need the existence check for idempotence\n # TODO: if dirname is now empty, should it be removed? is that worthwhile?\n end\n end\n end\n end\n\n to_hash # ?? return the patched data? or no return value? true/false for success?\n end",
"def zdiffstore(*args, **_arg1); end",
"def get(id)\n raise ArgumentError, 'Invalid id given' if !(String === id)\n\n if id =~ SHA_PATTERN\n raise ArgumentError, \"Sha too short: #{id}\" if id.length < 5\n\n trie = @objects.find(id)\n raise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n return trie.value if !trie.empty?\n elsif id =~ REVISION_PATTERN\n list = git_rev_parse(id).split(\"\\n\") rescue nil\n raise NotFound, \"Revision not found: #{id}\" if !list || list.empty?\n raise NotFound, \"Revision is ambiguous: #{id}\" if list.size > 1\n id = list.first\n\n trie = @objects.find(id)\n raise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n return trie.value if !trie.empty?\n else\n raise ArgumentError, \"Invalid id given: #{id}\"\n end\n\n @logger.debug \"gitrb: Loading #{id}\"\n\n path = object_path(id)\n if File.exists?(path) || (glob = Dir.glob(path + '*')).size >= 1\n if glob\n raise NotFound, \"Sha is ambiguous: #{id}\" if glob.size > 1\n path = glob.first\n id = path[-41..-40] + path[-38..-1]\n end\n\n buf = File.open(path, 'rb') { |f| f.read }\n\n raise NotFound, \"Not a loose object: #{id}\" if !legacy_loose_object?(buf)\n\n header, content = Zlib::Inflate.inflate(buf).split(\"\\0\", 2)\n type, size = header.split(' ', 2)\n\n raise NotFound, \"Bad object: #{id}\" if content.length != size.to_i\n else\n trie = @packs.find(id)\n\traise NotFound, \"Object not found: #{id}\" if trie.empty?\n\traise NotFound, \"Sha is ambiguous: #{id}\" if trie.size > 1\n id = trie.key\n pack, offset = trie.value\n content, type = pack.get_object(offset)\n end\n\n @logger.debug \"gitrb: Loaded #{type} #{id}\"\n\n set_encoding(id)\n object = GitObject.factory(type, :repository => self, :id => id, :data => content)\n @objects.insert(id, object)\n object\n end",
"def unpack(data); end",
"def contents_for(delta, type = :dst)\n ref = if type == :src\n delta.old_file[:oid]\n elsif type == :dst\n delta.new_file[:oid]\n end\n @git.lookup(ref).content if ref != \"0000000000000000000000000000000000000000\"\n end",
"def digestFile(filename)\n\tfh = File.open(filename)\n\tarray = String.new\n\tpreparse = true\n\tfh.each_line do |line|\n\t\tif preparse\n\t\t\tif line =~ /\\{/\n\t\t\t\tpreparse = false\n\t\t\t\tarray << line\n\t\t\tend\n\t\telse\n\t\t\t#Sub numberlongs\n\t\t\tif line.include? \"NumberLong\\(\"\n\t\t\t\tline.slice!(/NumberLong\\(/)\n\t\t\t\tline.slice!(/\\)/)\n\t\t\tend\n\n\t\t\t#ObjectId(\"4ef4af0963389003f300c2e7\"),\n\t\t\tif line.include? \"ObjectId\"\n\t\t\t\tline = line.gsub(\"ObjectId\\(\\\"\", \"\\\"ObjectId\\(\")\n\t\t\t\tline = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n\t\t\tend\n\n\t\t\t#Timestamp(10000, 27),\n\t\t\tif line.include? \": Timestamp\\(\"\n\t\t\t\tline = line.gsub(\"Timestamp\\(\", \"\\\"Timestamp\\(\")\n\t\t\t\tline = line.gsub(\"\\)\", \"\\)\\\"\")\n\t\t\tend\n\t\t\t#ISODate(\"2012-01-26T00:00:00Z\")\n\t\t\tif line.include? \": ISODate\\(\\\"\"\n\t\t\t\tline = line.gsub(\"ISODate\\(\\\"\", \"\\\"ISODate\\(\")\n\t\t\t\tline = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n\t\t\tend\n #BinData\n if line.include? \": BinData\\(\"\n line = line.gsub(\"BinData\\(\", \"\\\"BinData\\(\")\n line = line.gsub(\"\\\"\\)\", \"\\)\\\"\")\n line = line.gsub(\",\\\"\", \",\")\n end\n if line.include? \"\\\" : \\/\"\n line = line.gsub(\"\\\" : \\/\", \"\\\" : \\\"\\/\")\n line = line.gsub(\"\\/,\", \"\\/\\\",\")\n end\n\t\t\tif line !~ /bye/\n\t\t\t array << line\n\t\t\tend\n\t\tend\n\tend\n\tfh.close\n\tdoc = Yajl::Parser.parse(array)\n\treturn doc\nend",
"def read_from_blobs_file(entry)\n begin\n raw = File.read(@blobs_file_name, entry[BYTES], entry[START])\n rescue => e\n PEROBS.log.fatal \"Cannot read blobs file #{@blobs_file_name}: \" +\n e.message\n end\n if Zlib.crc32(raw, 0) != entry[CRC]\n PEROBS.log.fatal \"BTreeBlob for object #{entry[ID]} has been \" +\n \"corrupted: Checksum mismatch\"\n end\n\n raw\n end",
"def git_object(hash)\n return `git cat-file -p #{hash}`\nend",
"def main\n last_good_root = from_file($cache_dir + '/root.txt') ||\n from_file('config/root.txt') ||\n raise(\"Can't find root.txt\")\n\n repository = Gem::TUF::Repository.new(\n root: JSON.parse(last_good_root),\n bucket: FileCachingBucket.new(HttpBucket.new($host))\n )\n\n gem_name = ARGV.shift\n\n specs = repository.target('latest_specs.4.8.gz')\n raise \"could not find latest_specs.4.8.gz\" unless specs\n specs = unmarshal_gz specs\n gem = specs.detect {|x| x[0] == gem_name } || raise(\"Can't find gem #{gem}\")\n\n gem_with_version = \"#{gem[0]}-#{gem[1]}\"\n gem_path = \"gems/#{gem_with_version}.gem\"\n gemspec_path = \"quick/Marshal.4.8/#{gem_with_version}.gemspec.rz\"\n\n repository.target(gemspec_path)\n repository.target(gem_path)\n\n puts \"Downloaded #{gem_path} and #{gemspec_path}\"\nend",
"def ingest_object(row)\n\n @touch = File.join(\"/tmp\", row[\"exportedAs\"])\n \n unless File.exists?(@touch)\n obj = File.join(@directory, File.basename(row[\"exportedAs\"].gsub('\\\\', '/')))\n sourceFile = File.join(obj,File.basename(row[\"exportedAs\"].gsub('\\\\', '/')))\n \n if File.exists?(obj)\n # Gets a new PID\n pid = Nokogiri::XML(open(@fedora_uri + \"/management/getNextPID?xml=true&namespace=#{@fedora_ns}\", {:http_basic_authentication=>[@fedora_user, @fedora_pass]})).xpath(\"//pid\").text\n \n #testing stuff\n #pid = \"druid:1\"\n \n fedora_obj = AimsDocument.new(:pid => pid)\n fedora_obj.label = File.basename(obj)\n fedora_obj.save\n print obj + \" ===> \"\n # now glob the object directory and makes datastreams for each of the files and add them as datastream to the fedora object.\n # fedora_obj.save\n \n dsid = 'rightsMetadata'\n xml_content = fedora_obj.datastreams_in_memory[dsid].content\n ds = Hydra::RightsMetadata.from_xml(xml_content)\n pid = fedora_obj.pid\n ds.pid = pid\n ds.dsid = dsid\n fedora_obj.datastreams_in_memory[dsid] = ds\n permissions = {\"group\"=>{\"public\"=>\"read\", \"archivist\" => \"edit\", \"researcher\" => \"read\", \"patron\" => 'read', \"donor\" => 'edit' }, \"person\" => {\"archivist1\" => \"edit\"}}\n ds.update_permissions(permissions)\n permissions = {\"group\" => {\"public\"=>\"read\"}}\n ds.update_permissions(permissions)\n \n fedora_obj.save\n \n Dir[\"#{obj}/**/**\"].each do |f|\n \n #damn OS X spotlight. \n unless f.include?('DS_Store')\n \n # text files and jp2000s get added as datastreams in the object. the wordperfect files get added as their own objects\n if f =~ /(.*)\\.(txt)/\n fedora_obj.add_datastream(create_file_ds(f, File.basename(f), File.basename(f)))\n \n elsif f =~ /(.*)\\.(pdf)/\n fedora_obj.add_datastream(create_file_ds(f, 'pdf', \"#{File.basename(f)}.pdf\"))\n elsif f =~ /(.*)\\.(jp2)/\n # Below is if you want to not have the jp2 imported into fedora. it will just move them to a directory.\n #jp2_dir = File.join('/tmp', fedora_obj.pid.gsub(\"druid:\", \"druid_\"))\n #FileUtils.mkdir_p(jp2_dir) unless File.directory?(jp2_dir)\n #FileUtils.cp(f, jp2_dir, :verbose => true)\n # Below this adds the jp2000s into fedora.\n fedora_obj.add_datastream(create_file_ds(f, File.basename(f), File.basename(f)))\n\t\t elsif f == sourceFile #source file gets its own fedora object. \n cpid = Nokogiri::XML(open(@fedora_uri + \"/management/getNextPID?xml=true&namespace=#{@fedora_ns}\", {:http_basic_authentication=>[@fedora_user, @fedora_pass]})).xpath(\"//pid\").text\n \n child_obj = FileAsset.new(:pid => cpid)\n child_obj.label = File.basename(f)\n dc = child_obj.datastreams['descMetadata']\n dc.extent_values << File.size(f)\n \n \n fedora_obj.add_relationship(:has_part, child_obj )\n fedora_obj.add_relationship(:has_collection_member, child_obj)\n puts \"processing:#{f} for objectID #{cpid}\"\n ext = File.extname(f)\n id = \"DS1\"\n label = File.basename(f)\n child_obj.add_datastream(create_file_ds(f, id, label ))\n child_obj.save\n print f + \"\\n\"\n else\n puts \"not a file to ingest ==> #{f}\"\n end #if\n end #unless\n end #dir\n \n dm = fedora_obj.datastreams[\"descMetadata\"]\n prop = fedora_obj.datastreams[\"properties\"]\n \n labels = row[\"labels\"].split(',')\n \n loutput = {\"subjects\" => [], \"access\" => []}\n doc_values = { \"D\" => \"Document\", \"S\" => \"Spreadsheet\", \"E\" => \"Email\", \"IM\" => \"Image\", \"V\" => \"Video\", \"SO\" => \"Sound\"} \n comp_values = {\"CM:5.25\" => \"5.25 inch. 
floppy diskettes\", \"CM:3.5\" => \"3.5 inch. floppy diskettes\", \"CM:P\" => \"Punch cards\", \"CM:T\" => \"Tape\" }\n access_values = {\"O\" => \"owner\", \"A\" => \"Archivists\", \"I\" => \"Invited\", \"P\" =>\"Public\", \"M\"=>\"Reading\"}\n \n \n labels.each do |l|\n if doc_values.has_key?(l)\n loutput[\"doctype\"] = doc_values[l]\n elsif comp_values.has_key?(l)\n loutput[\"mediatype\"] = comp_values[l]\n elsif access_values.has_key?(l)\n loutput[\"access\"] << access_values[l]\n elsif l.include?(\"S:\")\n loutput[\"subjects\"] << l.gsub(\"S:\", '') \n end #if\n end #do\n \n pp(loutput)\n prop.collection_values << \"Steven J. Gould\"\n prop.pages_values << number_of_pages(fedora_obj)\n prop.path_values << row['path']\n prop.file_size_values << row['size']\n prop.md5_values << row['md5']\n prop.sha1_values << row['sha1']\n prop.file_type_values << row['type']\n prop.filename_values << File.basename(obj)\n \n dm.isPartOf_values = row[\"subseries\"].gsub(/[0-9]|Bookmark:|\\./,\"\").strip\n dm.source_values << row['filename']\n dm.type_values << loutput['doctype']\n dm.format_values << loutput[\"mediatype\"]\n \n \n \n loutput['subjects'].each { |s| dm.subject_values << s.gsub(\"S:\", \"\") }\n \n dm.save\n prop.save\n fedora_obj.save\n\n solr_doc = fedora_obj.to_solr\n solr_doc << Solr::Field.new( :discover_access_group_t => \"public\" )\n ActiveFedora::SolrService.instance.conn.update(solr_doc)\n FileUtils.mkdir_p(@touch)\n end #unless\n end #if exists? \n rescue Exception => e \n puts e.backtrace\n\tputs \"erroring....\"\n sleep(300)\n return nil\n rescue Timeout::Error => e\n puts \"timeout error ....\" \n sleep(350)\n\n \n end",
"def main\n # TODO: Check for expiry across all metadata files.\n\n last_good_root = from_file('root.txt') || get_metadata('root.txt')\n\n timestamp = get_metadata 'timestamp.txt'\n verify! timestamp, extract_keys(last_good_root, 'timestamp')\n\n release = get_hashed_metadata(\"release.txt\", timestamp['signed']['meta'])\n verify! release, extract_keys(last_good_root, 'release')\n\n # TODO: If our release hasn't changed from last known good, don't need to\n # refetch root.txt and targets.txt\n\n root = get_hashed_metadata(\"root.txt\", release['signed']['meta'])\n verify! root, extract_keys(last_good_root, \"root\")\n\n store_file 'root.txt', root\n last_good_root = root\n\n targets = get_hashed_metadata(\"targets.txt\", release['signed']['meta'])\n verify! targets, extract_keys(last_good_root, \"targets\")\n\n gem_name = ARGV[0]\n\n specs = unmarshal_gz(get_hashed_target(\"latest_specs.4.8.gz\", targets['signed']['targets']))\n\n gem = specs.detect {|x| x[0] == gem_name } || raise(\"Can't find gem #{gem}\")\n\n gem_with_version = \"#{gem[0]}-#{gem[1]}\"\n gem_path = \"gems/#{gem_with_version}.gem\"\n gemspec_path = \"quick/Marshal.4.8/#{gem_with_version}.gemspec.rz\"\n\n get_target(gemspec_path, targets['signed']['targets'])\n get_target(gem_path, targets['signed']['targets'])\n\n puts \"Downloaded #{gem_path} and #{gemspec_path}\"\nend",
"def object_for_hash(given_hash)\n @opener.open(name, \"r\") do |fp|\n given_hash.force_encoding(\"ASCII-8BIT\") if given_hash.respond_to?(:force_encoding)\n entry = nil\n if index\n starting_at = index.offset_for_hash(given_hash)\n return PackFileEntry.at(starting_at, fp).to_raw_object\n else\n starting_at = cached_offset(given_hash) || DATA_START_OFFSET\n fp.seek(starting_at, IO::SEEK_SET)\n while fp.tell < @end_of_data\n entry = PackFileEntry.read(fp)\n cache_entry(entry)\n return entry.to_raw_object if entry.hash_id == given_hash\n end\n end\n end\n nil\n end",
"def pack_entry(entry, rev)\n entry = IndexEntry.new(*entry) if entry.kind_of? Array\n p = entry.to_s\n if rev == 0 || rev == 1\n p = [version].pack(VERSION_FORMAT) + p[4..-1] # initial entry\n end\n p\n end",
"def ingest (file, attrs = {})\n file = File.new(file, \"r\") if file.kind_of? String\n\n source_filename = File.basename file\n attrs[:name] = attrs[:name] || File.basename(file, \".*\")\n\n params = { input: source_filename }\n\n params.merge! attrs\n uri_params = Helper.to_request_params params\n\n # Create request object\n # TODO: This is not going to work with big files\n req = Net::HTTP::Post.new(\"/sources?\"+uri_params)\n req['Content-Length'] = File.size file\n req.body_stream = File.open file\n\n json_source = nil\n\n @lagoonserver.request_with_auth req do |resp|\n # Deal with the response. Response comes (mostly) in pairs:\n # - {\"start\": <some action>} <-- <some action> was started\n # - \"ok\" <-- the last <some action> finished\n #\n # We keep track of the state using a stack. Since we're getting packets\n # from the socket we need to keep track of the leftover, since we're\n # parsing lines.\n leftover = \"\"\n state = []\n resp.read_body do |segment|\n leftover = leftover + segment\n lines = leftover.lines\n\n # 'lines' will keep the '\\n' character. Whatever ends with '\\n' was\n # indeed a full line. The rest (which should be a single element array)\n # is the leftover.\n readys_leftover = lines.partition {|l| l.end_with? \"\\n\"}\n readys = readys_leftover[0].map(&:chomp)\n leftover = readys_leftover[1][0] || \"\"\n\n readys.each do |l|\n if l == \"\\\"ok\\\"\"\n # In case of \"ok\" we pop the last state and log that it's now done.\n log_info \"Done: #{state.pop}\"\n else\n # In case of a \"start\" we log the new action and push it on the\n # stack.\n new_blob = JSON.parse(l)\n if start_token = new_blob[\"start\"]\n # In case of a \"start\" we log the new action and push it on the\n # stack.\n log_info \"Start: \" + start_token\n state.push start_token\n elsif notice_token = new_blob[\"notice\"]\n # Sometimes it's a \"notice\"; very well, just log it.\n log_info \"Notice: \" + notice_token\n else\n # If it's neither \"ok\", a start or a notice, just log its JSON\n # representation and implicitly assume it might be the source\n # metadata.\n json_source = JSON.parse(l)\n log_info (JSON.pretty_generate json_source)\n end\n end\n end\n end\n end\n Source.new(json_source, self)\n end",
"def file_sha1\n Digest::SHA1.file(self).hexdigest\n end",
"def unpack(binary)\n end",
"def decompress(data); end",
"def untar_file(f, to)\n end",
"def sha1; end",
"def sha1; end",
"def build_object(type, content)\n # taken from http://schacon.github.io/gitbook/7_how_git_stores_objects.html\n header = \"#{type} #{content.size}\\0\"\n store = header + content\n [Digest::SHA1.hexdigest(store), Zlib::Deflate.deflate(store)]\n end",
"def pack_entry(entry, rev)\n entry = IndexEntry.new(*entry) if entry.kind_of? Array\n entry.fix_signs\n e2 = [RevlogSupport::Support.offset_type(entry.offset_flags), \n entry.compressed_len, entry.base_rev, entry.link_rev,\n @index[entry.parent_one_rev].node_id, \n @index[entry.parent_two_rev].node_id, entry.node_id]\n e2.pack(INDEX_FORMAT_V0)\n end",
"def unpack(*args)\n options = {}\n options = args.pop if args.last.is_a?(Hash)\n args.push \"--at #{options[:at].inspect}\" if options[:at]\n _run \"bun unpack\", *args\n end",
"def run (hdin)\n i = 0;\n dups = 0;\n puts \"Reading from #{hdin.path}\";\n\n # Opens temporary loadfile. Overwrites existing file.\n @loadfiles.values.each do |hdout|\n hdout.open('w');\n end\n\n # Open infile and process line.\n hdin.open('r').file.each_line do |line|\n i += 1;\n\n # Skip stuff that doesn't look like json.\n if !line.start_with?('{') then\n STDERR.puts \"Skipping line: #{line}\";\n next;\n # For testing purposes.\n # elsif i > 2000 then\n # puts \"ok we are done here\";\n # break;\n elsif i % 1000 == 0 then\n puts \"#{i} ...\";\n end\n\n # gd_id is the id used as primary key in hathi_gd\n # and foreign key in all other tables, connecting the snowflake.\n gd_id = nil;\n item_id = nil;\n line_hash = JSON.parse(line);\n\n # The file where the data originally came from, not the file currently being read.\n infile = line_hash['infile'];\n file_id = get_infile_id(infile);\n # We don't want to include lineno or mongo_id in digest, so we delete them from the hash.\n lineno = line_hash.delete('lineno');\n mongo_id = line_hash.delete('mongo_id');\n # hashsum goes in hathi_gd to make sure we don't put total dups in there.\n hashsum = @sha_digester.hexdigest(line_hash.to_json);\n\n # Not all records have a record id.\n rec_id = 'N/A';\n if !line_hash['record_id'].nil? then\n rec_id = line_hash['record_id'].first.values.first;\n\n if rec_id.length > @max_record_id_len then\n rec_id = rec_id[0..(@max_record_id_len-1)];\n end\n\n\n end\n\n if !line_hash['item_id'].nil? then\n if line_hash['item_id'].first.class == {}.class then\n item_id = line_hash['item_id'].first.values.first;\n if item_id.size > 50 then\n # Make sure we fit in the column.\n item_id = item_id[0..49];\n end\n end\n end\n\n # Get a gd_id from mysql.\n # Use it as primary key in hathi_gd.\n # Use it in all the tables (except hathi_str) as foreign key.\n begin\n @hathi_gd_insert_q.execute(1, file_id, lineno, mongo_id, hashsum, rec_id, item_id);\n @last_id_q.query() do |row|\n gd_id = row[:id];\n end\n rescue Java::ComMysqlJdbcExceptionsJdbc4::MySQLIntegrityConstraintViolationException => e\n if (e.to_s =~ /Duplicate entry.+for key 'hashsum'/) == 0 then\n dups += 1;\n next;\n else\n puts e;\n puts line;\n end\n end\n # If we got a gd_id, proceed to insert the rest.\n insert_line(line_hash, gd_id);\n end\n hdin.close();\n\n # When all the lines in the file have been read and the loadfiles are done,\n # use the loadfiles for their intended purpose.\n @loadfiles.keys.each do |suffix|\n loadfile = @loadfiles[suffix];\n loadfile.close();\n sql = \"LOAD DATA LOCAL INFILE ? REPLACE INTO TABLE hathi_#{suffix} (gd_id, str_id, marc_field)\";\n puts sql;\n query = @conn.prepare(sql);\n query.execute(loadfile.path);\n # loadfile.delete();\n end\n\n puts @bench.prettyprint();\n puts \"#{dups} dups\";\nend",
"def parse_multifile(filename: \"\", type: \"old\")\r\n !filename.empty? ? file = File.binread(filename) : return\r\n case type\r\n when \"old\"\r\n file.split(\"\\n\").map(&:strip).reject(&:empty?).map{ |m|\r\n title = m.split('#')[0][1..-1] rescue \"\"\r\n author = \"Metanet Software\"\r\n map = parse_map(data: m.split(\"#\")[1], type: \"old\") rescue {tiles: [], objects: []}\r\n {title: title, author: author, tiles: map[:tiles], objects: map[:objects]}\r\n }\r\n else\r\n print(\"ERROR: Incorrect type (old).\")\r\n return 0\r\n end\r\nend",
"def unpack1 format\n unpack(format).first\n end",
"def test_pull_a_new_file\n a.add(\"two\" => \"two content\").commit(\"a added two\")\n \n assert_equal \"two content\", a['two']\n assert_equal nil, b['two']\n \n b.pull\n \n assert_equal \"two content\", a['two']\n assert_equal \"two content\", b['two']\n \n assert_log_equal [\n \"a added one\",\n \"a added two\"\n ], b\n end",
"def read(entry); end",
"def import\n fragments.each(&:destroy)\n File.open(bin_file_name, \"rb\") do |f|\n order = 0\n new_lines = 0\n until f.eof?\n new_lines += import_next_object(f, order)\n order += 1\n end\n update! lines: new_lines\n end\n add_locations\n end",
"def parse_multifile(filename: \"\", type: \"old\")\n !filename.empty? ? file = File.binread(filename) : return\n case type\n when \"old\"\n file.split(\"\\n\").map(&:strip).reject(&:empty?).map{ |m|\n title = m.split('#')[0][1..-1] rescue \"\"\n author = \"Metanet Software\"\n map = parse_map(data: m.split(\"#\")[1], type: \"old\") rescue {tiles: [], objects: []}\n {title: title, author: author, tiles: map[:tiles], objects: map[:objects]}\n }\n else\n print(\"ERROR: Incorrect type (old).\")\n return 0\n end\nend",
"def ExtractInfoFromFileContents file_contents\ndata = file_contents.split('|')\n# Remove the first line\n# Remove the project_name from data\n# Remove the line below project_name\ndata = data.drop(3)\n\n# Now, everything is in 3-tuple <key, value, \"\\n\">\ndata.shift\nproject_name = data.first\nproject_name = project_name.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nrepo_url = data.first\nrepo_url = repo_url.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nhead_sha = data.first\nhead_sha = head_sha.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nnum_commits = data.first\nnum_commits = num_commits.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nnum_merges = data.first\nnum_merges = num_merges.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nreverts_msg = data.first\nreverts_msg = reverts_msg.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nreverts_complete = data.first\nreverts_complete = reverts_complete.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\nreverts_partial = data.first\nreverts_partial = reverts_partial.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\ncps_complete = data.first\ncps_complete = cps_complete.gsub(/\\s+/, \"\")\ndata = data.drop(3)\n\ncps_partial = data.first\ncps_partial = cps_partial.gsub(/\\s+/, \"\")\ndata = data.drop(2)\n\nproject_name_repo_url_head_sha_combined = project_name + \"\\n\" + repo_url + \"\\n\" + head_sha\nreturn [project_name_repo_url_head_sha_combined, num_commits, num_merges, reverts_msg, reverts_complete, reverts_partial, cps_complete, cps_partial]\n\nend",
"def inflate(data); end",
"def sha1=(_); end",
"def extract_pack\n io = Zlib::GzipReader.new(DataDragon.data_pack_path.open)\n\n Gem::Package::TarReader.new(io) do |tar|\n tar.each do |tarfile|\n destination_file = (DataDragon.data_unpacked_path + tarfile.full_name)\n\n if tarfile.directory?\n destination_file.mkpath\n else\n destination_directory = destination_file.dirname\n destination_directory.mkpath unless destination_directory.directory?\n destination_file.write(tarfile.read)\n end\n end\n end\n end",
"def diff1; end",
"def unpack(p0) end",
"def test_tar\n\tx = \"test_tar\"\n\t@output = @s.archive({ 'files'=> [@test_directory_1_Path], 'format'=>'tar' , 'recurse'=>false } )\n\t#puts @output['archiveFile']\n\t\n\t@testid= 1\n\tTar.open(@output['archiveFile'], File::RDONLY, 0644, Tar::GNU | Tar::VERBOSE) do |tar|\n while tar.read # or 'tar.each do ...'\n #puts tar.pathname\n\t\t\n\t\t\n # tar.print_long_ls\n\n if tar.reg? && tar.pathname!=\"test_directory_1/.DS_Store\" # regular file\n tar.extract_file('test')\n\t\t want = File.read(File.join(@testdir, tar.pathname))\n\t\t puts tar.pathname\n\t\t #asserting bar1,2,3 from tar file is same as original bar1,2,3\n\t\t assert_log( want, File.read('test'), $log, x, @testid)\n end\n end\n\n ##if extract all files\n #tar.extract_all\n end\n\n\n ##for gzip archive\n #Tar.gzopen('foo.tar.gz', ...\n\n ##for bzip2 archive\n #Tar.bzopen('foo.tar.bz2', ...\n \n \n \n end",
"def test_pull_a_new_file_into_a_modified_tree\n b.add(\"dir/three\" => \"three content\").commit(\"b added three\")\n a.add(\"dir/two\" => \"two content\").commit(\"a added two\")\n \n assert_equal \"two content\", a['dir/two']\n assert_equal nil, b['dir/two']\n assert_equal \"three content\", b['dir/three']\n \n b.pull\n \n assert_equal \"two content\", a['dir/two']\n assert_equal \"two content\", b['dir/two']\n assert_equal \"three content\", b['dir/three']\n \n assert_log_equal [\n \"a added one\",\n \"a added two\",\n \"b added three\", \n \"gitgo merge of origin/gitgo into gitgo\"\n ], b\n end",
"def dodiff(item)\n trmt = Tempfile.new([tolocalname(item, @itemkey)+'_remote_', '.lua'])\n tlcl = Tempfile.new([tolocalname(item, @itemkey)+'_local_', '.lua'])\n if item.has_key? :script then\n Pathname.new(tlcl.path).open('wb') do |io|\n io << item[:script]\n end\n else\n Pathname.new(tlcl.path).open('wb') do |io|\n io << item[:local_path].read\n end\n end\n df = \"\"\n begin\n download(Pathname.new(trmt.path), item)\n\n cmd = $cfg['diff.cmd'].shellsplit\n cmd << trmt.path.gsub(::File::SEPARATOR, ::File::ALT_SEPARATOR || ::File::SEPARATOR)\n cmd << tlcl.path.gsub(::File::SEPARATOR, ::File::ALT_SEPARATOR || ::File::SEPARATOR)\n\n df, _ = Open3.capture2e(*cmd)\n ensure\n trmt.close\n trmt.unlink\n tlcl.close\n tlcl.unlink\n end\n df\n end",
"def unpack\n trace :debug, \"Build: apktool extract: #{@tmpdir}/apk\"\n\n apktool = path('apktool.jar')\n\n Dir[path('core.*.apk')].each do |d|\n version = d.scan(/core.android.(.*).apk/).flatten.first\n\n if version == \"melt\" then\n trace :debug, \"-jar #{apktool} d -f #{d} -o #{@tmpdir}/apk.#{version}\"\n #CrossPlatform.exec \"java\", \"-jar #{apktool} if #{@tmpdir}/jelly.apk jelly\"\n CrossPlatform.exec \"java\", \"-jar #{apktool} d -f #{d} -o #{@tmpdir}/apk.#{version}\"\n else\n trace :debug, \"-jar #{apktool} d -f -s -r #{d} -o #{@tmpdir}/apk.#{version}\"\n CrossPlatform.exec \"java\", \"-jar #{apktool} d -f -s -r #{d} -o #{@tmpdir}/apk.#{version}\"\n end\n\n [\"rb.data\", \"cb.data\"].each do |asset|\n CrossPlatform.exec \"pwd\",\"\"\n exists = File.exist?(path(\"apk.#{version}/assets/#{asset}\"))\n trace :debug, \"check #{@tmpdir}/apk.#{version}/assets/#{asset} #{exists}\" \n\n raise \"unpack failed. needed asset #{asset} not found\" unless File.exist?(path(\"apk.#{version}/assets/#{asset}\"))\n end\n\n end\nend",
"def diff(other_sha1)\n git \"diff #{other_sha1} -- #{@path}\"\n end",
"def test_add_back_a_remotely_removed_file\n a.add(\"two\" => \"two content\").commit(\"a added two\")\n a.rm(\"two\").commit(\"a removed two\")\n b.add(\"two\" => \"two content\").commit(\"b added two\")\n \n assert_equal nil, a['two']\n assert_equal \"two content\", b['two']\n \n b.pull\n \n assert_equal nil, a['two']\n assert_equal \"two content\", b['two']\n \n assert_log_equal [\n \"a added one\",\n \"a added two\",\n \"a removed two\",\n \"b added two\", \n \"gitgo merge of origin/gitgo into gitgo\"\n ], b\n end",
"def unpack_one format\n @b.unpack_one format\n end",
"def decode_file file\n base = File.basename(file)\n # add the timestamp\n $results[file] = { values: {}, timestamp: Util.cdr2stamp(base) }\n\tcmd = $dec_cmd + \" \" + file + \" | tail -n 40\"\n\tres = `#{cmd} `\n if !$?.success?\n $stderr.puts \"Decoder failed.\"; \n abort; \n end;\n\tres.split(\"\\n\").each do |line|\n\t\tnext unless line.match $re\n\t\t$results[file][:values][$1] = $2\n\tend\nend",
"def get_object(id)\n path = object_path(id)\n\n if File.exists?(path)\n buf = open(path, \"rb\") { |f| f.read }\n\n raise \"not a loose object: #{id}\" if not legacy_loose_object?(buf)\n\n header, content = Zlib::Inflate.inflate(buf).split(/\\0/, 2)\n type, size = header.split(/ /, 2)\n\n raise \"bad object: #{id}\" if content.length != size.to_i\n else\n content, type = get_object_from_pack(id)\n end\n\n return type, content\n end",
"def diphot_metadata_to_h\n File.open(@qualified_filename, 'r') do |fd|\n diff = fd.readline.chomp\n reference = fd.readline.chomp\n @obj_metadata = { 'diff' => diff, 'reference' => reference }\n end\n end",
"def process_primary_files_with_changes_only\n # We get the diff only so that we know which files have changed.\n # It's ok to use the reference commits because we're dealing with\n # content AT files only.\n diff = @repository.diff(@from_git_commit, @to_git_commit, context_lines: 0)\n fwc = []\n diff.patches.each { |patch|\n file_name = patch.delta.old_file[:path]\n # Skip non content_at files\n next if !@file_list.include?(file_name)\n # next if !file_name.index('63-0728')\n unless file_name =~ /\\/content\\/.+\\d{4}\\.at\\z/\n raise \"shouldn't get here: #{ file_name.inspect }\"\n end\n\n @logger.info(\" - process #{ file_name }\")\n\n absolute_file_path = File.join(@repository.base_dir, file_name)\n # Initialize content AT file `to` with contents as of `to_git_commit`.\n # It's fine to use the reference sync commit as the sync operation\n # doesn't touch content AT files, only STM CSV ones.\n content_at_file_to = Repositext::RFile::ContentAt.new(\n '_', # Contents are initialized later via `#as_of_git_commit`\n @language,\n absolute_file_path,\n @any_content_type\n ).as_of_git_commit(@to_git_commit)\n\n compute_st_ops_attrs = {\n from_git_commit: @from_git_commit,\n to_git_commit: @to_git_commit,\n prev_last_operation_id: @prev_last_operation_id,\n execution_context: @execution_context,\n }\n\n compute_st_ops_attrs = refine_compute_st_ops_attrs(\n compute_st_ops_attrs,\n {\n from_table_release_version: @from_table_release_version,\n to_table_release_version: @to_table_release_version,\n absolute_file_path: absolute_file_path\n }\n )\n soff = SubtitleOperationsForFile.new(\n content_at_file_to,\n @repository.base_dir,\n compute_st_ops_attrs\n ).compute\n\n if soff.operations.any?\n # Only collect files that have subtitle operations\n @prev_last_operation_id = soff.last_operation_id\n fwc << soff\n end\n }\n\n # Then we add any files that have st_sync_required set to true and are\n # not in fwc already.\n @file_list.each { |content_at_filename|\n # Skip files that we have captured already\n next if fwc.any? { |soff| soff.content_at_file.repo_relative_path == content_at_filename }\n # Skip files that don't have st_sync_required set to true at to_git_commit\n dj_filename = content_at_filename.sub(/\\.at\\z/, '.data.json')\n # We use dj file contents at to_git_commit :at_child_or_ref\n dj_file = Repositext::RFile::DataJson.new(\n '_', # Contents are initialized later via #as_of_git_commit\n @language,\n dj_filename,\n @any_content_type\n ).as_of_git_commit(\n @to_git_commit,\n :at_child_or_ref\n )\n next if(dj_file.nil? || !dj_file.read_data['st_sync_required'])\n # This file is not in the list of fwc yet, and it has st_sync_required.\n # We add an soff instance with no operations. This could be a file\n # that has changes to subtitle timeslices only.\n content_at_file_from = Repositext::RFile::ContentAt.new(\n '_', # Contents are initialized later via `#as_of_git_commit`\n @language,\n content_at_filename,\n @any_content_type\n ).as_of_git_commit(@from_git_commit)\n soff = Repositext::Subtitle::OperationsForFile.new(\n content_at_file_from,\n {\n file_path: content_at_file_from.repo_relative_path,\n from_git_commit: @from_git_commit,\n to_git_commit: @to_git_commit,\n },\n [] # No operations\n )\n fwc << soff\n }\n # Return list of unique files with changes\n fwc.uniq\n end",
"def sha1(data, c: true)\n if c && $linux && File.file?(PATH_SHA1)\n File.binwrite(\"util/#{HASH_INPUT_FN}\", data)\n code = shell(\"./util/sha1 ./util/#{HASH_INPUT_FN} ./util/#{HASH_OUTPUT_FN}\")\n return nil if !code\n hash = File.binread(\"util/#{HASH_OUTPUT_FN}\")\n FileUtils.rm([\"./util/#{HASH_INPUT_FN}\", \"./util/#{HASH_OUTPUT_FN}\"])\n hash\n else\n Digest::SHA1.digest(data)\n end\nrescue => e\n lex(e, 'Failed to compute the SHA1 hash')\n nil\nend",
"def file_remote_digestsha1(file_name)\n data = read_file(file_name)\n chksum = nil\n if data\n chksum = Digest::SHA1.hexdigest(data)\n end\n return chksum\n end",
"def load_checksums(dobj)\n log ' - load_checksums()'\n dobj.object_files.each { |file| file.provider_md5 = file.md5 }\n end",
"def sha1\n @sha1 ||= digest(path, :sha1)\n end",
"def base_id\n\t\tbase_file = File.basename(sorted_files.first)\n\t\tif base_file =~ /^\\d+\\=.*\\+(.*)\\.zip$/\n\t\t\t$1\n\t\telse\n\t\t\traise RuntimeError, \"Malformed patch file #{base_file}.\"\n\t\tend\n\tend",
"def unzip_file(file)\n end",
"def core_sha1(x, len)\n # append padding\n x[len >> 5] ||= 0\n x[len >> 5] |= 0x80 << (24 - len % 32)\n x[((len + 64 >> 9) << 4) + 15] = len\n\n w = Array.new(80, 0)\n a = 1_732_584_193\n b = -271_733_879\n c = -1_732_584_194\n d = 271_733_878\n e = -1_009_589_776\n\n # for(var i = 0; i < x.length; i += 16)\n i = 0\n while i < x.length\n olda = a\n oldb = b\n oldc = c\n oldd = d\n olde = e\n\n # for(var j = 0; j < 80; j++)\n j = 0\n while j < 80\n if j < 16\n w[j] = x[i + j] || 0\n else\n w[j] = rol(w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16], 1)\n end\n\n t = safe_add(safe_add(rol(a, 5), sha1_ft(j, b, c, d)),\n safe_add(safe_add(e, w[j]), sha1_kt(j)))\n e = d\n d = c\n c = rol(b, 30)\n b = a\n a = t\n j += 1\n end\n\n a = safe_add(a, olda)\n b = safe_add(b, oldb)\n c = safe_add(c, oldc)\n d = safe_add(d, oldd)\n e = safe_add(e, olde)\n i += 16\n end\n [a, b, c, d, e]\n end",
"def read(filename)\n f = File.open(filename, 'r')\n f.seek(-ID3::ID3v1tagSize, IO::SEEK_END)\n hastag = (f.read(3) == 'TAG')\n if hastag\n f.seek(-ID3::ID3v1tagSize, IO::SEEK_END)\n @raw = f.read(ID3::ID3v1tagSize)\n\n # self.parse!(raw) # we should use \"parse!\" instead of duplicating code!\n\n if (raw.getbyte(ID3v1versionbyte) == 0) \n @version = \"1.0\"\n else\n @version = \"1.1\"\n end\n else\n @raw = @version = nil\n end\n f.close\n #\n # now parse all the fields\n\n ID3::SUPPORTED_SYMBOLS[@version].each{ |key,val|\n if val.class == Range\n # self[key] = @raw[val].squeeze(\" \\000\").chomp(\" \").chomp(\"\\000\")\n self[key] = @raw[val].strip\n elsif val.class == Fixnum\n self[key] = @raw.getbyte(val).to_s\n else \n # this can't happen the way we defined the hash..\n # printf \"unknown key/val : #{key} / #{val} ; val-type: %s\\n\", val.type\n end\n }\n hastag\n end",
"def decompressor; end",
"def update!(**args)\n @algorithm = args[:algorithm] if args.key?(:algorithm)\n @bigstore_object_ref = args[:bigstore_object_ref] if args.key?(:bigstore_object_ref)\n @blob_ref = args[:blob_ref] if args.key?(:blob_ref)\n @blobstore2_info = args[:blobstore2_info] if args.key?(:blobstore2_info)\n @composite_media = args[:composite_media] if args.key?(:composite_media)\n @content_type = args[:content_type] if args.key?(:content_type)\n @content_type_info = args[:content_type_info] if args.key?(:content_type_info)\n @cosmo_binary_reference = args[:cosmo_binary_reference] if args.key?(:cosmo_binary_reference)\n @crc32c_hash = args[:crc32c_hash] if args.key?(:crc32c_hash)\n @diff_checksums_response = args[:diff_checksums_response] if args.key?(:diff_checksums_response)\n @diff_download_response = args[:diff_download_response] if args.key?(:diff_download_response)\n @diff_upload_request = args[:diff_upload_request] if args.key?(:diff_upload_request)\n @diff_upload_response = args[:diff_upload_response] if args.key?(:diff_upload_response)\n @diff_version_response = args[:diff_version_response] if args.key?(:diff_version_response)\n @download_parameters = args[:download_parameters] if args.key?(:download_parameters)\n @filename = args[:filename] if args.key?(:filename)\n @hash_prop = args[:hash_prop] if args.key?(:hash_prop)\n @hash_verified = args[:hash_verified] if args.key?(:hash_verified)\n @inline = args[:inline] if args.key?(:inline)\n @is_potential_retry = args[:is_potential_retry] if args.key?(:is_potential_retry)\n @length = args[:length] if args.key?(:length)\n @md5_hash = args[:md5_hash] if args.key?(:md5_hash)\n @media_id = args[:media_id] if args.key?(:media_id)\n @object_id_prop = args[:object_id_prop] if args.key?(:object_id_prop)\n @path = args[:path] if args.key?(:path)\n @reference_type = args[:reference_type] if args.key?(:reference_type)\n @sha1_hash = args[:sha1_hash] if args.key?(:sha1_hash)\n @sha256_hash = args[:sha256_hash] if args.key?(:sha256_hash)\n @timestamp = args[:timestamp] if args.key?(:timestamp)\n @token = args[:token] if args.key?(:token)\n end",
"def extract(entry, dest_path, &block); end",
"def update_commitlist(h)\n h.map do |entry|\n sha = entry[\"commit\"].to_s.strip\n n = entry[\"note\"]\n # First, try to look it up in our existing repo\n if commit_exists?(sha) || sha.empty?\n entry # do nothing, put it back in the hash\n else\n # Ok, we know it doesn't exist. Now look it up in gitlog.json\n if @gitlog_json.key? sha\n m = @gitlog_json[sha][\"message\"]\n svn_id = m.lines.select {|l| l.match? /git-svn-id/ }.join.strip\n grep_cmd = <<~EOS.strip\n git -C ./tmp/src rev-list --all --grep=\"#{svn_id}\" --\n EOS\n stdout, stderr, status = Open3.capture3(grep_cmd)\n if stderr.empty?\n {\n \"commit\" => stdout.strip,\n \"note\" => <<~EOS.strip\n #{entry[\"note\"].to_s.lines.join(\"\\n\")}\n\n Formerly #{sha} before HTTPD rewrote Git history.\n EOS\n }\n else\n warn \"ERROR getting commit #{sha}. #{stderr}\"\n entry\n end\n else\n warn \"ERROR commit #{sha} does not exist in gitlog.json\"\n entry\n end\n end\n end\nend",
"def update!(**args)\n @blob_ref = args[:blob_ref] if args.key?(:blob_ref)\n @blobstore2_info = args[:blobstore2_info] if args.key?(:blobstore2_info)\n @cosmo_binary_reference = args[:cosmo_binary_reference] if args.key?(:cosmo_binary_reference)\n @crc32c_hash = args[:crc32c_hash] if args.key?(:crc32c_hash)\n @inline = args[:inline] if args.key?(:inline)\n @length = args[:length] if args.key?(:length)\n @md5_hash = args[:md5_hash] if args.key?(:md5_hash)\n @object_id_prop = args[:object_id_prop] if args.key?(:object_id_prop)\n @path = args[:path] if args.key?(:path)\n @reference_type = args[:reference_type] if args.key?(:reference_type)\n @sha1_hash = args[:sha1_hash] if args.key?(:sha1_hash)\n end",
"def getChangesOfCommit(commit_id = false)\n my_commit = ((commit_id == false and @repo.commits.size > 0) ? @repo.commits.first : @repo.commit(commit_id))\n if my_commit == nil\n return false\n end\n \n # get list of changed files and parse it\n @filelist = Hash.new\n options = {:r => true, :name_status => true, :no_commit_id => true}\n if @repo.commit(my_commit.sha).parents[0] == nil # if my_commit is the first commit\n options[:root] = true\n end\n changed_files_list = @git.diff_tree(options, my_commit.id).strip\n if changed_files_list.class == String and changed_files_list.length > 0\n changed_files_list.split(\"\\n\").each do |f|\n commit = my_commit\n operation = f[0,1] # D/M/A\n filepath = f[2..-1] # path+filename\n path = \"/\" + filepath.match(/^.+\\//).to_s # just path\n status = \"created\"\n if operation =~ /^D$/i # deleted\n # the file was deleted, so get the blob from the parent-commit\n commit = @repo.commit(my_commit.parents[0].sha)\n status = \"deleted\"\n elsif operation =~ /^M$/i # modified\n status = \"updated\"\n end\n blob = commit.tree/(filepath)\n\n #name = filepath.gsub(path[1..-1], '') #blob.name\n path = path.gsub(/\\/private\\/[0-9]+\\//,'')\n \n \n \n @filelist[\"/\" + filepath] = {\"uploaded\" => '1', \"status\" => status, \"blob_hash\" => blob.id, \"name\" => blob.name, \"path\" => \"/#{path}\", \"size\" => blob.size, \"filetype\" => blob.mime_type, \"filedate\" => @repo.commit(commit.sha).date.strftime('%T %F').to_s}\n \n \n end\n end\n\n if @filelist.size > 0\n return @filelist\n else\n return false\n end\n end",
"def changes\n @changes ||= JSON.parse(File.read(ARGV[1]))\nend",
"def unpack_using_seven_zip(tmpdir)\n if new_resource.absolute_path =~ /\\.t(ar\\.)?(gz|bz(2)?|xz)$/\n # 7-Zip doesn't know to unpack both levels of the archive on its own\n # so we need to handle this more explicitly.\n shell_out!(\"#{seven_zip_home}\\\\7z.exe x -so \\\"#{windows_path(new_resource.absolute_path)}\\\" | #{seven_zip_home}\\\\7z.exe x -si -ttar -o\\\"#{windows_path(tmpdir)}\\\"\")\n else\n shell_out!(\"#{seven_zip_home}\\\\7z.exe x -o\\\"#{windows_path(tmpdir)}\\\" \\\"#{windows_path(new_resource.absolute_path)}\\\"\")\n end\n end",
"def parse_entries\n @time_cache ||= {}\n\n if /\\A((?:.*\\n){,3})commit\\s/ =~ @content\n class << self; prepend Git; end\n parse_info($1)\n return parse_entries\n end\n\n entries = []\n entry_name = nil\n entry_body = []\n\n @content.each_line do |line|\n case line\n when /^\\s*$/ then\n next\n when /^\\w.*/ then\n entries << [entry_name, entry_body] if entry_name\n\n entry_name = $&\n\n begin\n time = parse_date entry_name\n @time_cache[entry_name] = time\n rescue ArgumentError\n entry_name = nil\n end\n\n entry_body = []\n when /^(\\t| {8})?\\*\\s*(.*)/ then # \"\\t* file.c (func): ...\"\n entry_body << $2.dup\n when /^(\\t| {8})?\\s*(\\(.*)/ then # \"\\t(func): ...\"\n entry = $2\n\n if entry_body.last =~ /:/ then\n entry_body << entry.dup\n else\n continue_entry_body entry_body, entry\n end\n when /^(\\t| {8})?\\s*(.*)/ then\n continue_entry_body entry_body, $2\n end\n end\n\n entries << [entry_name, entry_body] if entry_name\n\n entries.reject! do |(entry,_)|\n entry == nil\n end\n\n entries\n end",
"def unpack(digest)\n deltas = {}\n requests = {}\n\n digest.each do |address, digest_version|\n peer = self[address]\n if peer\n # peer already exists, find out who is more current\n if peer.max_version_seen > digest_version\n deltas[address] = peer.deltas_after(digest_version)\n elsif peer.max_version_seen < digest_version\n requests[address] = peer.max_version_seen\n end\n else\n # peer is new, ask for info\n requests[address] = 0\n add(address)\n end\n end\n\n # sort by peers with most deltas\n deltas = Hash[deltas.sort_by { |address, delta| delta.size }]\n\n [deltas, requests]\n end",
"def get_existing_commits!\n release_meta_paths = Dir.glob(\"#{RELEASE_META_DIR}/*.toml\").to_a\n\n release_meta_paths.collect do |release_meta_path|\n contents = File.read(release_meta_path)\n parsed_contents = TomlRB.parse(contents)\n release_hash = parsed_contents.fetch(\"releases\").values.fetch(0)\n release_hash.fetch(\"commits\").collect do |c|\n message_data = parse_commit_message!(c.fetch(\"message\"))\n\n {\n \"sha\" => c.fetch(\"sha\"),\n \"message\" => c.fetch(\"message\"),\n \"author\" => c.fetch(\"author\"),\n \"date\" => c.fetch(\"date\"),\n \"pr_number\" => message_data.fetch(\"pr_number\"),\n \"files_count\" => c[\"files_count\"],\n \"insertions_count\" => c[\"insertions_count\"],\n \"deletions_count\" => c[\"deletions_count\"]\n }\n end\n end.flatten\nend",
"def test_patch\n checkout_dir =File.expand_path(File.join('..','..','..','sequence', 'repository', 'Unidata@thredds'),File.dirname(__FILE__))\n repos = Rugged::Repository.new(checkout_dir)\n from = repos.lookup('49429686c3be8c3cb0aea17fca3e6684706d5fa1')\n to = repos.lookup('f63544cc69b49664a0487bf064ce0c7f64b40641')\n puts \"from #{from}\"\n puts \"to #{to}\"\n diff = to.patch(from)\n puts diff.content\n puts \"patch\"\n diff.patch.lines do |line|\n puts line\n end\n \n #.lines.each do |line|\nend",
"def core_sha1(x, len)\n # append padding\n x[len >> 5] ||= 0\n x[len >> 5] |= 0x80 << (24 - len % 32)\n x[((len + 64 >> 9) << 4) + 15] = len\n\n w = Array.new(80, 0)\n a = 1732584193\n b = -271733879\n c = -1732584194\n d = 271733878\n e = -1009589776\n\n #for(var i = 0; i < x.length; i += 16)\n i = 0\n while(i < x.length)\n olda = a\n oldb = b\n oldc = c\n oldd = d\n olde = e\n\n #for(var j = 0; j < 80; j++)\n j = 0\n while(j < 80)\n if(j < 16) \n w[j] = x[i + j] || 0\n else \n w[j] = rol(w[j-3] ^ w[j-8] ^ w[j-14] ^ w[j-16], 1)\n end\n\n t = safe_add(safe_add(rol(a, 5), sha1_ft(j, b, c, d)),\n safe_add(safe_add(e, w[j]), sha1_kt(j)))\n e = d\n d = c\n c = rol(b, 30)\n b = a\n a = t\n j += 1\n end\n\n a = safe_add(a, olda)\n b = safe_add(b, oldb)\n c = safe_add(c, oldc)\n d = safe_add(d, oldd)\n e = safe_add(e, olde)\n i += 16\n end\n return [a, b, c, d, e]\n end",
"def digest_sha1(*files)\n files.flatten.collect { |file| \n File.exists?(file) ? Digest::SHA1.hexdigest(File.read(file)) : nil\n }\n end",
"def checksums; end",
"def parse obj\nstr = obj.to_s.sub %r/\\Aurn:uuid:/, ''\nstr.gsub! %r/[^0-9A-Fa-f]/, ''\n# raw = str[0..31].lines.to_a.pack 'H*'\n# lines not supported before ruby 1.8.7, and that's not there on heroku\nraw = str[0..31].split(/\\n/).pack 'H*'\nret = new raw\nret.freeze\nret\nend",
"def load_bitbucket()\n JSON.parse(IO.read('db-1.0.json'))\nend",
"def process\n filename = \"index.markdown\"\n markdowns = {filename => []} \n state = :message\n message = [\"\\n\"]\n patch = []\n commit = nil\n (@gitlogp.split(\"\\n\")+[\"DONE\"]).each { |line|\n words=line.split\n if line.slice(0,1)==\" \" || words.length==0\n # commit messages start with 4 spaces, diff contents with 1 space\n if state==:message\n if words[0]==\"OUTPUT_FILE:\"\n filename = words[1]\n markdowns[filename] ||= []\n else\n message << \"#{line.slice(4..-1)}\"\n end\n else\n patch << \" #{line}\" if state==:patch\n end\n elsif words[0]==\"commit\" or words[0]==\"DONE\"\n if !commit.nil?\n # replace the short description line with a named link\n shortlog = message[2]\n message[2] = \"<a name='#{shortlog}'> </a>\"\n markdowns[filename] += message.map {|l|\n if l==\"SHOW_PATCH\"\n (patch+[\"{: .diff}\\n\"]).join(\"\\n\")\n else\n l\n end\n }\n series = tags[commit].slice(-2..-1)\n markdowns[filename] << \"\\n#{tags[commit]}: [view on github](#{@commit_link_base}#{commit}), [download #{series}-#{shortlog}.patch](#{@patch_link_base}/#{series}-#{shortlog}.patch)\\n{: .commit}\\n\"\n end\n \n message=[\"\\n\"]\n patch=[]\n\n commit = words[1]\n state = :message\n elsif [\"Author:\", \"Date:\", \"new\", \"index\", \"---\", \"+++\", '\\\\'].include?(words[0])\n # chomp\n elsif words[0]==\"diff\"\n state = :patch\n left = words[2].slice(2..-1)\n right = words[3].slice(2..-1)\n if left==right\n patch << \" ::: #{right}\"\n else\n patch << \" ::: #{left} -> #{right}\"\n end\n elsif words[0]==\"@@\"\n # git tries to put the function or class name after @@. This\n # works great for C diffs, but it only finds the class name in\n # Ruby, which is usually similar to the file name, Therefore\n # it's distracting cruft. Toss it.\n patch << \" #{words.slice(0,4).join(\" \")}\"\n else\n message << \"#{line.slice(4..-1)}\" if state==:message\n patch << \" #{line}\" if state==:patch \n end\n }\n output = {}\n markdowns.each do |fn, markdown|\n output[fn] = markdown.join(\"\\n\")\n Rails.logger.info(output[fn]) if respond_to? :Rails\n end\n return output\n end",
"def extract_entry(destdir, entry) # :yields action, name, stats:\n stats = {\n :current => 0,\n :currinc => 0,\n :entry => entry\n }\n\n if entry.directory?\n dest = File.join(destdir, entry.full_name)\n\n yield :dir, entry.full_name, stats if block_given?\n\n if Archive::Tar::Minitar.dir?(dest)\n begin\n FileUtils.chmod(entry.mode, dest)\n rescue Exception\n nil\n end\n else\n FileUtils.mkdir_p(dest, :mode => entry.mode)\n FileUtils.chmod(entry.mode, dest)\n end\n\n fsync_dir(dest)\n fsync_dir(File.join(dest, \"..\"))\n return\n else # it's a file\n destdir = File.join(destdir, File.dirname(entry.full_name))\n FileUtils.mkdir_p(destdir, :mode => 0755)\n\n destfile = File.join(destdir, File.basename(entry.full_name))\n FileUtils.chmod(0600, destfile) rescue nil # Errno::ENOENT\n\n yield :file_start, entry.full_name, stats if block_given?\n\n File.open(destfile, \"wb\", entry.mode) do |os|\n loop do\n data = entry.read(4096)\n break unless data\n\n stats[:currinc] = os.write(data)\n stats[:current] += stats[:currinc]\n\n yield :file_progress, entry.full_name, stats if block_given?\n end\n os.fsync\n end\n\n FileUtils.chmod(entry.mode, destfile)\n fsync_dir(File.dirname(destfile))\n fsync_dir(File.join(File.dirname(destfile), \"..\"))\n\n yield :file_done, entry.full_name, stats if block_given?\n end\n end",
"def read_object(id)\n return nil unless (index_entry = find(id))\n read_from_blobs_file(index_entry)\n end",
"def delta_structure\n DeltaStructure.new map, @new_creatures, @new_spawners, @new_quests, @knowledge_base, @new_pool, file_key, requests\n end",
"def update!(**args)\n @sha1 = args[:sha1] unless args[:sha1].nil?\n end",
"def unpack(args)\n full_path, file = args[:full_path], args[:file]\n \n # Is unpack activated?\n if self.config[:unpack][:active]\n self.files = File.directory?(full_path) ? Unpack.runner!(full_path, self.config[:unpack]) : []\n else\n self.files = []\n end\n \n if files.any?\n self.growl(self.messages[:unpack][:title], file, :unpack); return 5\n else\n return 6\n end\n end",
"def gitget(obj)\n data = gitrepo.read(obj[:oid]).data\n begin\n v = JSON.load(data)\n v = obj[:name] if v.nil?\n v\n rescue JSON::ParserError\n # parser errors are not fatal\n # this just indicates a string entry rather than a hash\n data.empty? ? obj[:name] : data\n end\n end",
"def read_binary_object(fname,fd)\n # first: read the marker byte\n buff = fd.read(1)\n\n object_length = buff.unpack(\"C*\")\n object_length = object_length[0] & 0xF\n\n buff = buff.unpack(\"H*\")\n object_type = buff[0][0].chr\n\n if(object_type != \"0\" && object_length == 15) then\n object_length = read_binary_object(fname,fd)\n object_length = object_length.value\n end\n\n retval = nil\n case object_type\n when '0' then # null, false, true, fillbyte\n retval = read_binary_null_type(object_length)\n when '1' then # integer\n retval = read_binary_int(fname,fd,object_length)\n when '2' then # real\n retval = read_binary_real(fname,fd,object_length)\n when '3' then # date\n retval = read_binary_date(fname,fd,object_length)\n when '4' then # data\n retval = read_binary_data(fname,fd,object_length)\n when '5' then # byte string, usually utf8 encoded\n retval = read_binary_string(fname,fd,object_length)\n when '6' then # unicode string (utf16be)\n retval = read_binary_unicode_string(fname,fd,object_length)\n when 'a' then # array\n retval = read_binary_array(fname,fd,object_length)\n when 'd' then # dictionary\n retval = read_binary_dict(fname,fd,object_length)\n end\n\n return retval\n end",
"def reproc_true(file)\n full_path = file\n if (! File.exists?(full_path))\n # Rubymatica.save_status(dir_uuid, \"Can't reprocess, #{full_path} does not exist.\")\n print \"Can't reprocess, #{full_path} does not exist.\\n\";\n exit\n end\n extract_flag = false\n dir_uuid = File.basename(full_path)\n \n # When reprocessing, the meta_data/info.db already exists (or else\n # this will fail), therefore we can look in the db for info such\n # as the name of this ingest.\n \n my_ig = Ingest.new(dir_uuid)\n \n base_name = my_ig.read_meta(\"ingest_name\")\n tub = \"#{Dest}/#{dir_uuid}/#{Accession_dir}\"\n \n Rubymatica.save_status(dir_uuid, \"Reprocessing #{Accession_dir} uuid: #{dir_uuid}\")\n \n igl_dest = \"#{Dest}/#{dir_uuid}/#{Ig_logs}\"\n pv_dir = \"#{Dest}/#{dir_uuid}/#{Pv}\"\n md_dir = \"#{Dest}/#{dir_uuid}/#{Meta}\"\n ac_dir = \"#{Dest}/#{dir_uuid}/#{Accession_dir}\"\n \n # Delete any previously machine created files. Keeping files\n # would mean using previously created file uuid's and that\n # would be a mess. This codes doesn't have the architecture\n # for that.\n\n # Use an anonymous array because we can. Directories that we'll\n # clean up, followed by a list of files we will *not* delete.\n\n [igl_dest, pv_dir, md_dir].each { |path|\n Find.find(path) { |file|\n if (file.match(Dcx) or \n file.match(Db_name))\n next;\n end\n\n # Great line of code. OOP totally rocks. Not! Class,\n # method and variable all the same name, and you thought\n # programmers didn't have a sense of humor.\n \n if (File.file?(file))\n File.delete(file)\n end\n }\n }\n return base_name,dir_uuid, my_ig, tub, igl_dest, pv_dir, md_dir, ac_dir, extract_flag\n end",
"def download_remote_sha1\n @log.info('Downloading Elasticsearch SHA1.')\n\n @remote_sha1 = ''\n open(@download.verify_url) do |file|\n @remote_sha1 = file.read\n end\n\n @remote_sha1 = @remote_sha1.split(/\\s\\s/)[0]\n\n @remote_sha1\n end",
"def decompress_file(file, to)\n end",
"def get_sha1(path)\n return @cache_sha1 unless @cache_sha1.nil?\n sha1 = \"\"\n if File.exist?(path)\n Dir.chdir(path) do\n sha1 = %x(git rev-parse HEAD).delete(\"\\n\")\n end\n end\n\n @cache_sha1 = sha1\n sha1\n end",
"def put_raw_object(content, type)\n size = content.length.to_s\n LooseStorage.verify_header(type, size)\n \n header = \"#{type} #{size}\\0\"\n store = header + content\n \n sha1 = Digest::SHA1.hexdigest(store)\n path = @directory+'/'+sha1[0...2]+'/'+sha1[2..40]\n \n if !File.exists?(path)\n content = Zlib::Deflate.deflate(store)\n \n FileUtils.mkdir_p(@directory+'/'+sha1[0...2])\n File.open(path, 'w') do |f|\n f.write content\n end\n end\n return sha1\n end",
"def initialize(name, opener)\n @name = name\n @opener = opener\n opener.open(name, \"r\") do |fp|\n # Check signature\n unless fp.read(4) == \"PACK\"\n raise ArgumentError.new(\"#{name} is not a packfile.\")\n end\n @version = fp.read(4).unpack(\"N\").first\n @size = fp.read(4).unpack(\"N\").first\n cur = fp.tell\n fp.seek(0, IO::SEEK_END)\n @end_of_data = fp.tell - 20\n end\n possible_index_path = name[0..(name.size - File.extname(name).size - 1)] + \".idx\"\n if File.exist? possible_index_path\n # use a persistent file pointer\n fp = File.open(possible_index_path, \"r\")\n @index = PackFileIndex.parse(fp)\n end\n @offset_cache = {}\n end",
"def diff_dumps(file, timestamp1, timestamp2)\n begin\n puts \"Getting diff comparison from dumps at #{timestamp1} and #{timestamp2}\"\n tempfile1 = Tempfile.new('owr-')\n tempfile2 = Tempfile.new('owr-')\n find_dump(file, timestamp1, tempfile1)\n find_dump(file, timestamp2, tempfile2)\n tempfile1.close\n tempfile2.close\n difference = `diff #{tempfile1.path} #{tempfile2.path}`\n puts difference\n ensure\n tempfile1.unlink\n tempfile2.unlink\n end\nend",
"def path_from_sha1(sha1)\n raise NotImplementedError, 'Implemented in subclasses. See filesystem_pool for example.'\n end",
"def update_filepath(_package_id:, _filepath:, _sha1:, _size:); end",
"def pack\n end",
"def save_single_zip_to_redis(zip_url,redis)\n file = open(zip_url)\n Zip::File.open(file) do |zip_file|\n zip_file.each do |entry|\n #looks like there are duplicates with slight differences in username, posts, comments etc\n #I can make them unique by cutting out by discussion title, discussion title+forum, etc as key.\n #otherwise, if I can assume each unique xml filename is a unique instance that we want I can just use that as key\n redis.set(entry.name, entry.get_input_stream.read)\n p entry.name\n p entry.get_input_stream.read\n p entry\n puts \"\"\n end\n end\nend",
"def unszip_file(f, to)\n line(\"7z\", \"x -y {archive} -o{dest} * -r\").pass(archive: f, dest: to)\n end",
"def json\n delta_pack.to_json\n end",
"def zipfile=(_arg0); end",
"def sha1(name)\n Digest::SHA1.file(path(name)).hexdigest\n end",
"def parse_zip64_extra(for_local_header); end",
"def diff2; end",
"def zip_contents; end",
"def one_round_trip(dir, name)\n obj = File.open(File.join(dir, \"#{name}.xson\")) { |f|\n begin\n XMLToRuby.new.xml_to_ruby(f)\n rescue => ex # unsupported type\n return\n end\n }\n\n File.open(File.join(dir, \"#{name}.bson\"), 'rb') { |f|\n # Read the BSON from the file\n bson = f.read\n\n # Turn the Ruby object into BSON bytes and compare with the BSON bytes\n # from the file.\n bson_from_ruby = BSON.serialize(obj)\n\n begin\n assert_equal bson.length, bson_from_ruby.to_s.length\n assert_equal bson, bson_from_ruby.to_s\n rescue => ex\n# File.open(File.join(dir, \"#{name}_out_a.bson\"), 'wb') { |f| # DEBUG\n# bson_from_ruby.each { |b| f.putc(b) }\n# }\n raise ex\n end\n\n # Turn those BSON bytes back into a Ruby object.\n #\n # We're passing a nil db to the contructor here, but that's OK because\n # the BSON DBRef bytes don't contain the db object in any case, and we\n # don't care what the database is.\n obj_from_bson = BSON.deserialize(bson_from_ruby)\n assert_kind_of OrderedHash, obj_from_bson\n\n # Turn that Ruby object into BSON and compare it to the original BSON\n # bytes.\n bson_from_ruby = BSON.serialize(obj_from_bson)\n begin\n assert_equal bson.length, bson_from_ruby.to_s.length\n assert_equal bson, bson_from_ruby.to_s\n rescue => ex\n# File.open(File.join(dir, \"#{name}_out_b.bson\"), 'wb') { |f| # DEBUG\n# bson_from_ruby.each { |b| f.putc(b) }\n# }\n raise ex\n end\n }\n end"
] | [
"0.552862",
"0.5391449",
"0.5317819",
"0.5299938",
"0.526601",
"0.52436566",
"0.5187634",
"0.5091055",
"0.50745004",
"0.5057651",
"0.5035649",
"0.50208414",
"0.5008146",
"0.49874026",
"0.49746084",
"0.49548158",
"0.49505496",
"0.4945198",
"0.49419287",
"0.493956",
"0.493956",
"0.49265614",
"0.4922092",
"0.49045825",
"0.4903273",
"0.49010313",
"0.48933172",
"0.48618436",
"0.4857594",
"0.48392212",
"0.48284724",
"0.48260885",
"0.47984388",
"0.47983637",
"0.47944334",
"0.4793459",
"0.4789027",
"0.4784634",
"0.47727385",
"0.4764266",
"0.4744745",
"0.47259006",
"0.47039288",
"0.47035637",
"0.46863857",
"0.468081",
"0.46792307",
"0.46753287",
"0.46722767",
"0.46639735",
"0.465842",
"0.4650133",
"0.4646923",
"0.46458378",
"0.46422726",
"0.4628269",
"0.46271586",
"0.46267915",
"0.46243978",
"0.46187985",
"0.46179605",
"0.46163905",
"0.46110344",
"0.46037325",
"0.4600684",
"0.4596064",
"0.45889556",
"0.45852664",
"0.45843783",
"0.45808244",
"0.45806038",
"0.45780984",
"0.45757192",
"0.45693526",
"0.4568934",
"0.4564158",
"0.45620444",
"0.45605057",
"0.45595446",
"0.45535693",
"0.45508236",
"0.4549332",
"0.45446742",
"0.45404404",
"0.45294994",
"0.4526818",
"0.452535",
"0.45231673",
"0.45220807",
"0.4520035",
"0.45168847",
"0.4516427",
"0.4515342",
"0.4513633",
"0.45067903",
"0.4505954",
"0.450289",
"0.45020184",
"0.450083",
"0.44978514"
] | 0.45495617 | 81 |
GET /character/geo_positions/1 GET /character/geo_positions/1.json | def show
@character_geo_position = Character::GeoPosition.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @character_geo_position }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new\n @character_geo_position = Character::GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def index\n @positions = Position.all.order :id\n @geojson = Hash.new\n\n @points = Position.geopoints\n @geojson = {\n type: 'FeatureCollection',\n features: @points\n }\n\n respond_to do |format|\n format.html\n format.json { render json: @geojson } # respond with the created JSON object\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def find\n render json: format_places(closest(params[:latitude], params[:longitude]).take(3))\n end",
"def show\n @character = Character.find(params[:id])\n @json = Character.find(params[:id]).to_gmaps4rails\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @character }\n end\n end",
"def search_coord_route\n sql_results = Route.select(\"ST_AsGeoJSON(path) as path\")\n .where(\"cod_route = ?\", \"#{params[:id]}\").to_sql\n\n resp = ActiveRecord::Base.connection.execute(sql_results).map do |value|\n ActiveSupport::JSON.decode(value[\"path\"])[\"coordinates\"]\n end\n render json: resp\n end",
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def geo(id)\n get \"/geo/id/#{id}.json\"\n end",
"def index\n @positions = Position.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @positions }\n end\n end",
"def update\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n if @character_geo_position.update_attributes(params[:character_geo_position])\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def points\n locale = Location.create( :address => params[:locale] )\n lat = locale.latitude.to_f\n lng = locale.longitude.to_f\n\n # instagram api request\n url = \"https://api.instagram.com/v1/locations/search.json?lat=#{lat.round(3)}&lng=#{lng.round(3)}&client_id=efea46f4c52542348ced4c529263cf33\"\n\n # creates an instance variable @result with the json object\n @result = HTTParty.get url\n\n # makes the json data available for ajax\n render :json => @result\n end",
"def show\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\n @post_geo = current_user.post_geos.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @post_geo }\n end\n end",
"def get_location\n as_json(get_results('/locations.json'))\n end",
"def show\n @unko_position = UnkoPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @unko_position }\n end\n end",
"def map_coordinates\n loc = find_or_create_geolocation(params)\n respond_to do |format|\n @geolocation_boxes = GeolocationBox.only_geo_bbox(resource.id)\n @geolocation_box = loc.geolocation_box\n format.js { render template: 'stash_datacite/geolocation_boxes/create.js.erb' }\n end\n end",
"def stops_by_position\n get '/gtfs/stops/geosearch/'\n end",
"def show\n @position = Position.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\n @position = Position.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\n @position = Position.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\n @position = Position.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position }\n end\n end",
"def show\r\n if Item.exists?(params[:id])\r\n @item = Item.find(params[:id])\r\n else\r\n redirect_to :home_page\r\n return\r\n end\r\n lats = []\r\n longs = []\r\n @item.item_histories.each do |it|\r\n lats << it.latitude\r\n longs << it.longitude\r\n end\r\n @positions = {:longs => longs, :lats => lats, :histories => @item.item_histories.paginate(:page => params[:page], :per_page => 10)}\r\n respond_to do |format|\r\n format.html # show.html.erb\r\n format.json { render json: @item }\r\n end\r\n end",
"def get_room_stuff(pos)\r\n my_port = 8083\r\n room_map_message = \"/maps/#{$roomnumber}/#{pos}\"\r\n url = URI.parse(\"http://localhost:#{my_port}#{room_map_message}\")\r\n req = Net::HTTP::Get.new(url.to_s)\r\n res = Net::HTTP.start(url.host, url.port){|http|\r\n http.request(req)\r\n }\r\n my_json = JSON.parse(res.body) \r\n if my_json[\"east\"]\r\n return my_json[\"east\"]\r\n \r\n elsif my_json[\"west\"]\r\n return my_json[\"west\"]\r\n\r\n elsif my_json[\"north\"]\r\n return my_json[\"north\"]\r\n\r\n elsif my_json[\"contents\"]\r\n return my_json[\"contents\"]\r\n\r\n elsif my_json[\"south\"]\r\n return my_json[\"south\"]\r\n\r\n elsif my_json[\"down\"]\r\n return my_json[\"down\"]\r\n\r\n elsif my_json[\"up\"]\r\n return my_json[\"up\"] \r\n end\r\nend",
"def show\n @text_position = TextPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @text_position }\n end\n end",
"def geo\n\t\tciudad = params[:id]\n\t\t@ubication = City.where(id: ciudad)\n\t\trespond_to do |format|\n\t\t\tformat.json { render json: @ubication }\n\t\tend\n\tend",
"def show\n @user_position = UserPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @user_position }\n end\n end",
"def index\n @positives = Positive.all\n @positives_json= Positive.all.map(&:lonlat).as_json\n\n\n\n\n\n end",
"def index\n @user_positions = UserPosition.paginate(:page => params[:page]).per_page(10)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @user_positions }\n end\n end",
"def index\n @support = find_support\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @positions }\n end\n end",
"def localization\n #debugger\n searched = params[:finger_print]\n @coordinates = FingerPrint.KNN(searched)\n puts @coordinates\n respond_to do |format|\n format.html \n format.json {render json: @coordinates}\n end\n end",
"def locations\n get('locations')\n end",
"def show\n educacao = Educacao.find(params[:id])\n ponto = educacao.the_geom\n @latitude = ponto.x\n @longitude = ponto.y\n\n respond_to do |format|\n format.html\n format.json {render json: educacao}\n end\n end",
"def show\n \t@internship_position = InternshipPosition.find(params[:id])\n\n \trespond_to do |format|\n \t\tformat.html # show.html.erb\n \t\tformat.json { render json: @internship_position }\n \tend\n end",
"def geo\n fetch('creature.bird.geo')\n end",
"def show\n @crew_position = CrewPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @crew_position }\n end\n end",
"def get_json\n response = conn.get(@current_location)\n parsed = JSON.parse(response.body, symbolize_names: true)\n\n end",
"def show\n @position_mapper = PositionMapper.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @position_mapper }\n end\n end",
"def show\n @map = Map.find_by_id(params[:id])\n @saved_locations = @map.points.for_editing.to_json\n\n respond_to do |format|\n format.html # show.html.erb\n format.xml { render :xml => @map }\n end\n end",
"def map\n @locations = MonzoTransaction.where(\"lat IS NOT NULL AND lng IS NOT NULL\")\n\n respond_to do |format|\n format.html # map.html.erb\n format.json { render json: @locations, callback: params[:callback] }\n format.xml { render xml: @locations }\n end\n end",
"def get_info(lat, lon)\n Net::HTTP.get(URI(\"https://api.3geonames.org/#{lat},#{lon}.json\"))\nend",
"def index\n @characters = Character.all\n\n render json: @characters\n end",
"def locations(place)\n get :loc => place\n end",
"def get_coords(address = '', token = '')\n # Change spaces to + to create a safe URLstring\n address = address.strip.gsub(/\\s/, '+')\n\n # API-URL for fetching the addresses geo-location\n url = \"https://api.tiles.mapbox.com/v4/geocode/mapbox.places/farmington,87401,#{address}.json?proximity=-108.20833683013916,36.73038906153143&access_token=#{token}\"\n\n # Output address of where the crime occurred\n puts \"Address: #{address}\"\n\n # get URL Response\n response = open(url).read\n\n # Process response at JSON rather than plain text\n results = JSON.parse(response)\n\n coords = []\n if !results['features'][0].nil?\n coords[0] = results['features'][0]['geometry']['coordinates'][0]\n coords[1] = results['features'][0]['geometry']['coordinates'][1]\n else\n coords = [0, 0]\n end\n\n coords\nend",
"def map\n @locations = Gig.where(\"lat IS NOT NULL AND lng IS NOT NULL\")\n\n respond_to do |format|\n format.html # map.html.erb\n format.json { render json: @locations, callback: params[:callback] }\n format.xml { render xml: @locations }\n end\n end",
"def show\n @campus_food = CampusFood.find(params[:id])\n\t@loc = params[:loc]\n\t\n\t@locations = Location.all(:conditions =>[ \"loc like ? \", \"%#{params[:loc]}%\"])\n\tif !@locations.empty?\n @lat = @locations[0].lat\n @lng = @locations[0].lng\n end\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @locations }\n end\n end",
"def get_geo_key\n unless(params[:geo] )\n render_error \"incorrect params\"\n return\n end\n geo_key = ensure_geo_key_exists params[:geo].downcase\n unless geo_key\n render json: {\"error\" => {\"message\" => \"geo_not_found\"}}\n return\n end\n return render json: geo_key.as_json\n end",
"def index\n @position_mappers = PositionMapper.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @position_mappers }\n end\n end",
"def show\n render json: @character\n end",
"def index\n @location_points = LocationPoint.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @location_points }\n end\n end",
"def get_coord\n @single_city_data['coord']\n end",
"def get_coordinates\n checked_google_response do\n return parsed_response if raw\n parsed_response[\"results\"].inject([]) do |memo, result|\n memo << { \n :lat => result[\"geometry\"][\"location\"][\"lat\"], \n :lng => result[\"geometry\"][\"location\"][\"lng\"],\n :matched_address => result[\"formatted_address\"],\n :bounds => result[\"geometry\"][\"bounds\"],\n :full_data => result\n }\n end\n end\n end",
"def geocoder\n begin\n render(:json => open(\"#{GEOCODER_URL}&#{request.query_string}\"){|f| f.read})\n rescue SocketError\n render(:text => $!.to_s, :status => 500)\n end\n end",
"def correct_geo_coords\n coords = params.seek :geo, :geojson, :geometry, :coordinates\n if coords\n array = GeosHelper.geo_coords_to_array(coords)\n params[:geo][:geojson][:geometry][:coordinates] = array\n end\n end",
"def user_location\n return JSON.parse open(\"http://freegeoip.net/json\").read\nend",
"def index\n @coords_num = Coordinate.count\n if @coords_num >= 4800\n @latest_coords = Coordinate.order('created_at DESC').limit(10000).reverse\n else\n @latest_coords = Coordinate.all\n end\n @coords = { 'c' => [] }\n @latest_coords.each do |v|\n\n a = []\n colour = v['colour']\n colour[0] = ''\n a << colour\n a << v['x']\n a << v['y']\n @coords['c'] << a\n\n end\n render json: @coords\n end",
"def get_some_room_stuff(roomnumberone,pos)\r\n my_port = 8083\r\n room_map_message = \"/maps/#{roomnumberone}/#{pos}\"\r\n url = URI.parse(\"http://localhost:#{my_port}#{room_map_message}\")\r\n req = Net::HTTP::Get.new(url.to_s)\r\n res = Net::HTTP.start(url.host, url.port){|http|\r\n http.request(req)\r\n }\r\n my_json = JSON.parse(res.body) \r\n if my_json[\"east\"]\r\n return my_json[\"east\"]\r\n \r\n elsif my_json[\"west\"]\r\n return my_json[\"west\"]\r\n\r\n elsif my_json[\"north\"]\r\n return my_json[\"north\"]\r\n\r\n elsif my_json[\"contents\"]\r\n return my_json[\"contents\"]\r\n\r\n elsif my_json[\"south\"]\r\n return my_json[\"south\"]\r\n\r\n elsif my_json[\"down\"]\r\n return my_json[\"down\"]\r\n\r\n elsif my_json[\"up\"]\r\n return my_json[\"up\"] \r\n end\r\nend",
"def index\n @map = Map.find(params[:map_id])\n if @map.kind == \"activity\"\n @locations = @map.locations.activity\n elsif @map.kind == \"news\"\n @locations = @map.locations.news\n else\n @locations = @map.locations\n end\n respond_to do |format|\n format.json { render :json => @locations.as_json(:include => :location_pin)}\n end\n end",
"def index\n @geopoints = Geopoint.all\n @jsons = @geopoints.to_gmaps4rails\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @geopoints }\n end\n end",
"def current_spots\n\n if current_spots_params[:filter] && current_spots_params[:filter][:location]\n location = RGeo::GeoJSON.decode(current_spots_params[:filter][:location])\n end\n # assign default location (LA City Hall) if one couldn't be deciphered from the params.\n location ||= LA_LOCATION\n\n limit = current_spots_params[:limit] || 20\n\n @spots = Spot.current.near(location).limit(limit).includes(:guesses, :user, :game)\n\n respond_to do |format|\n format.json\n end\n end",
"def location_primer\n render(json: location_list)\n end",
"def activity_near_by\n @activity = Activity.find(params[:id])\n gps = params[:latitude]+','+ params[:longitude]\n @activity.parks.near(gps, 100).size\n respond_to do |format|\n format.json do\n render :json => @activity.parks.near(gps, 100).reverse\n end\n end\n end",
"def get_coords(address)\n file = open(\"https://maps.googleapis.com/maps/api/geocode/json?address=#{URI.escape(address)}&key=AIzaSyARfm-3M5KppQjgGnHK1DIYt-GmG4eJqCI\")\n contents = file.read()\n hash = JSON.parse(contents)\n lat = hash[\"results\"][0][\"geometry\"][\"location\"][\"lat\"].to_f\n lng = hash[\"results\"][0][\"geometry\"][\"location\"][\"lng\"].to_f\n return [lat, lng]\nend",
"def index\n @coordinates = @track.coordinates.for_user current_user\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @coordinates }\n end\n end",
"def index\n characters = @project.characters.all\n render json: { characters: characters }\n end",
"def api_request(city)\n url = \"https://jobs.github.com/positions.json?utf8=%E2%9C%93&description=&location=\"+city\n #Escape and parse URL for proper formatting.\n escaped_url = URI.escape(url)\n parsed_url = URI.parse(escaped_url)\n #Make the HTTP request.\n request = Net::HTTP.get(parsed_url)\n #Parse the JSON response.\n response = JSON.parse(request)\n #Returns all listings for the given city.\n return response\n end",
"def whereAmI\n \n lat = params[:latitud]\n long = params[:longitud]\n \n @donde = Geocoder.search(lat + \",\" + long)[0]\n \n render :json => {:direccion => @donde.address }\n \n end",
"def index\n @locs = Loc.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @locs }\n end\n end",
"def geo; end",
"def index\n if params[:x1]\n @places = Place.by_bounds( params[:x1], params[:y1], params[:x2], params[:y2], params )\n end\n #@places = Place.without_nodes\n respond_with(@places)\n end",
"def get_positions\n all_players = consume_player_data\n keep_position_info(all_players)\n end",
"def get_nearby_food_pictures\n if params[:lat] and params[:lng]\n @restaurants = Restaurant.all\n # @buildings = Building.find( :all, :conditions => [ 'name LIKE ?', '%' + params[:q].downcase + '%'], :limit => 20)\n \n end\n\n render :json => @restaurants\n end",
"def objects_at_location\n map.objects_at_player_location\n end",
"def show\n @bemap = Bemap.find(params[:id])\n @bemap_polylines = []\n @bemap_characters = []\n @bemap.characters.each do |v|\n @bemap_characters << { :lng => v[:longitude], :lat => v[:latitude]}\n end\n @bemap_polylines << @bemap_characters\n @bemap_polylines = @bemap_polylines.to_json\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bemap }\n end\n end",
"def coordinates\n location = Location.new(location_params)\n if location.save\n render json: location\n else\n render json: \"ERROR\"\n end\n end",
"def locations\n @client.get('/BikePoint')\n end",
"def index\n locations = Location.all\n render json: locations\n end",
"def location\n { latLng: [city.lat, city.lng], name: city.name, status: name.downcase.to_s }\n end",
"def client_choose(offset = 10, limit = 20)\n response = Net::HTTP.get(\n URI(\"https://pokeapi.co/api/v2/pokemon/?offset=#{offset}&limit=#{limit}\")\n )\n \n JSON.parse(response)\nend",
"def show\n @client = Client.find(params[:id])\n @pets = @client.pets\n @json = @client.to_gmaps4rails\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @client }\n end\n end",
"def index\n @post_geos = current_user.settings.post_geos || []\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @post_geos }\n end\n end",
"def show\n @admin_geonode = Admin::Geonode.find(params[:id])\n @json = @admin_geonode.to_gmaps4rails\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @admin_geonode }\n end\n end",
"def show\n h = Hotel.find( params[:id] )\n @hotel = h\n @coords = Hotel.hotelJsonCoords( h )\n end",
"def geo(place_id)\n get \"/geo/id/#{place_id}.json\"\n end",
"def index\n @positions = @mold.positions\n @photos = @mold.photos.new\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @positions }\n end\n end",
"def index\n @positions = current_user.positions.all\n end",
"def show\n @county = Entity.where(id: params[:id]).where(entity_type: 'County').first\n respond_with(@county) do |format|\n format.geojson { render text: @county.to_geojson }\n end\n end",
"def get_address_and_loc\n id = params[:place_id].to_i\n place = Place.find(id)\n render json: { address: place.address, loc: place.neighborhood }\n end",
"def positions(id)\n get(\"/accounts/#{id}/positions\")['positions']\n end",
"def getLocation\n loc = Addressable::URI.new(\n :scheme => \"https\",\n :host => \"maps.googleapis.com\",\n :path => \"maps/api/geocode/json\",\n :query_values => {:address => \"160+Folsom,+San+Francisco,+CA\",\n :sensor => \"false\"}).to_s\n\n location_request = RestClient.get(loc)\n parsed_location_request = JSON.parse(location_request)\n\n lat = parsed_location_request[\"results\"][0][\"geometry\"][\"location\"][\"lat\"].to_s\n lng = parsed_location_request[\"results\"][0][\"geometry\"][\"location\"][\"lng\"].to_s\n\n [lat,lng]\nend",
"def active_positions\n authenticated_post(\"auth/positions\").body\n end",
"def index\n render json: @places\n end",
"def map\n @locations = CyclingEvent.where(\"lat IS NOT NULL AND lng IS NOT NULL\")\n\n respond_to do |format|\n format.html # map.html.erb\n format.json { render json: @locations, callback: params[:callback] }\n format.xml { render xml: @locations }\n end\n end",
"def index\n @characters = Character.all.paginate page: params[:page]\n render json: {\n current_page: @characters.current_page,\n per_page: @characters.per_page,\n total: @characters.total_entries,\n results: @characters\n }\n end",
"def extract_coordinates(parsed)\n parsed['results'].first['geometry']['location']\nend",
"def route\n hitch = Hitch.find(params[:hitch_id])\n render json: hitch.geojson\n end",
"def location\n { latLng: [lat, lng], name: name, status: name.downcase.to_s }\n end",
"def from_name\n @locations = Location.where(\"name like ?\", \"%#{params[:id]}%\") \n\n lat = params[:lat]\n lon = params[:lon]\n\n if(lat and lon)\n @locations = Location.nearest_five(lat.to_f, lon.to_f, @locations)\n end\n\n respond_to do |format|\n format.html\n format.json { render :json => @locations }\n end\n end",
"def index\n @geos = Geo.all\n end",
"def index\n @positions = Position.all(:order=>'name')\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @positions }\n end\n end",
"def lat\n @position[0]\n end"
] | [
"0.65410924",
"0.63755864",
"0.61864567",
"0.60786736",
"0.6040894",
"0.60065997",
"0.59626114",
"0.5901784",
"0.5868109",
"0.5863196",
"0.576979",
"0.57410073",
"0.5722417",
"0.5721037",
"0.5698015",
"0.56925607",
"0.5685136",
"0.568214",
"0.568214",
"0.568214",
"0.568214",
"0.56693816",
"0.56654006",
"0.5657514",
"0.5656871",
"0.5647917",
"0.5633976",
"0.5566756",
"0.55468273",
"0.55332106",
"0.55285513",
"0.5499289",
"0.54953223",
"0.54924124",
"0.5481651",
"0.5454241",
"0.54524004",
"0.5431857",
"0.54133207",
"0.5405561",
"0.5402126",
"0.540181",
"0.5386383",
"0.53821063",
"0.53650737",
"0.53611624",
"0.53551507",
"0.5343747",
"0.53383756",
"0.5335395",
"0.53316706",
"0.53256994",
"0.5307964",
"0.5295649",
"0.52920413",
"0.52813786",
"0.5272474",
"0.52625763",
"0.5262113",
"0.525788",
"0.5242396",
"0.52375114",
"0.5234152",
"0.5231152",
"0.52284545",
"0.52222335",
"0.5216479",
"0.52107584",
"0.519844",
"0.5190533",
"0.51813376",
"0.5177827",
"0.5166835",
"0.51659876",
"0.5159494",
"0.5152596",
"0.514987",
"0.5148478",
"0.51435304",
"0.51417637",
"0.5139109",
"0.51367426",
"0.51330584",
"0.51304114",
"0.5129333",
"0.5129094",
"0.51241124",
"0.5117153",
"0.5114478",
"0.51133436",
"0.51082766",
"0.5105451",
"0.5104975",
"0.5097849",
"0.5095862",
"0.50921965",
"0.5087562",
"0.5087388",
"0.5083236",
"0.50799775"
] | 0.7293571 | 0 |
GET /character/geo_positions/new GET /character/geo_positions/new.json | def new
@character_geo_position = Character::GeoPosition.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @character_geo_position }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @position = Position.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position }\n end\n end",
"def new\n @position = Position.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position }\n end\n end",
"def new\n @position = Position.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position }\n end\n end",
"def new\n @position_mapper = PositionMapper.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position_mapper }\n end\n end",
"def new\n @text_position = TextPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @text_position }\n end\n end",
"def new\n @user_position = UserPosition.new\n @positions = Position.all\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @user_position }\n end\n end",
"def new\n @loc = current_user.locs.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @loc }\n end\n end",
"def create\n begin\n @new_position = @@data_util.hash_data_to_upper_case(params[:position], ['description'])\n @new_position[:createdby] = session[:username]\n @new_position[:mypclient_id] = session[:client_id]\n\n @position = Position.new(@new_position)\n if @position.save\n @@request_result[:success] = true\n @@request_result[:notice] =\"Position was successfully created.\"\n else\n @@request_result[:errormsg] = @position.errors.full_messages[0]\n end\n rescue Exception => e\n @@request_result[:errormsg] = e.message\n end\n render json: @@request_result\n end",
"def new\n \t@internship_position = InternshipPosition.new\n\n \trespond_to do |format|\n \t\tformat.html #new.html.erb\n \t\tformat.json { render json: @internship_position }\n \tend\n end",
"def new\n @crew_position = CrewPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @crew_position }\n end\n end",
"def new\n @position = Position.new\n \n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @position }\n end\n end",
"def new\n @position_history = PositionHistory.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position_history }\n end\n end",
"def new\n @locationmap = Locationmap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @locationmap }\n end\n end",
"def create\n positions = JSON.parse(params['positions'])\n\n # Fill in body to initialize the game and return a 200 response\n\n render plain: \"OK\"\n end",
"def new\n @map = current_user.maps.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map }\n end\n end",
"def new\n @position = Position.new\n @organisational = Dictionary.find(:all, :order => \"place ASC\", :conditions => {:indicator => 1})\n @functional = Dictionary.find(:all, :conditions => { :indicator => 2 })\n @method = Dictionary.find(:all, :conditions => { :indicator => 3 })\n @leadership = Dictionary.find(:all, :conditions => { :indicator => 4 })\n @social = Dictionary.find(:all, :conditions => { :indicator => 5 })\n\n 1.times { @position.organisationals.build }\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @position }\n end\n end",
"def create\n @geo = Geo.new(geo_params)\n\n respond_to do |format|\n if @geo.save\n format.html { redirect_to @geo, notice: 'Geo was successfully created.' }\n format.json { render :show, status: :created, location: @geo }\n else\n format.html { render :new }\n format.json { render json: @geo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @position = Position.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @position }\n end\n end",
"def new\n @map = Map.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map }\n end\n end",
"def new\n @location_point = LocationPoint.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location_point }\n end\n end",
"def new\n @post_geo = current_user.post_geos.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @post_geo }\n end\n end",
"def new\n @coord = @member.build_coord\n \n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @coord }\n end\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @map_node = Map::Node.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map_node }\n end\n end",
"def new\n @curpg = :admintools\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @possess = Possess.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @possess }\n end\n end",
"def new\n @geopoint = Geopoint.new\n @lat = 13.7522222\n @lng = 100.4938889\n @json = '[{\"lng\": \"100.4938889\", \"lat\": \"13.7522222\"}]'\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @geopoint }\n end\n end",
"def new\n @processed_location = ProcessedLocation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @processed_location }\n end\n end",
"def new\n @map_marker = MapMarker.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map_marker }\n end\n end",
"def create\n @lost_pet = LostPet.new(params[:lost_pet])\n @lost_pet.coordinate = ParseGeoPoint.new :latitude => params[:latitude].to_f, :longitude => params[:longitude].to_f\n respond_to do |format|\n if @lost_pet.save\n format.html { redirect_to @lost_pet, notice: 'Lost pet was successfully created.' }\n format.json { render json: @lost_pet, status: :created, location: @lost_pet }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lost_pet.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @spot = Spot.new\n lat, lng = params[:ltng].scan(/\\((.+)\\)/).flatten.first.split(',')\n @spot.latitude = lat\n @spot.longitude = lng\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spot }\n end\n end",
"def new\n position_id = params[:id]\n @position = Position.find(params[:id])\n end",
"def create\n @crew_position = CrewPosition.new(params[:crew_position])\n\n respond_to do |format|\n if @crew_position.save\n format.html { redirect_to @crew_position, :notice => 'Crew position was successfully created.' }\n format.json { render :json => @crew_position, :status => :created, :location => @crew_position }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @crew_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def new\r\n @location = Location.new\r\n\r\n respond_to do |format|\r\n format.html # new.html.erb\r\n format.json { render json: @location }\r\n end\r\n end",
"def new\r\n @location = Location.new\r\n\r\n respond_to do |format|\r\n format.html # new.html.erb\r\n format.json { render json: @location }\r\n end\r\n end",
"def new\n @location = Location.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @location }\n end\n end",
"def new\n @location = Location.new\n @json = @location.to_gmaps4rails\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def new\n @location_url_map = LocationUrlMap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location_url_map }\n end\n end",
"def map_coordinates\n loc = find_or_create_geolocation(params)\n respond_to do |format|\n @geolocation_boxes = GeolocationBox.only_geo_bbox(resource.id)\n @geolocation_box = loc.geolocation_box\n format.js { render template: 'stash_datacite/geolocation_boxes/create.js.erb' }\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @location = Location.new \n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @location }\n end\n end",
"def new\n @select_markers = return_markers\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @place }\n end\n end",
"def new\n @map = Map.new\n\n puts @saved_locations\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @map }\n end\n end",
"def new\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location }\n end\n end",
"def create\n \t@internship_position = InternshipPosition.new(internship_position_params)\n\n \trespond_to do |format|\n \t\tif @internship_position.save\n \t\t\tformat.html { redirect_to @internship_position, notice: 'Position was successfully created'}\n \t\t\tformat.json { render json: @internship_position, status: :created, location: @internship_position }\n \t\telse\n \t\t\tformat.html { render action: \"new\" }\n \t\t\tformat.json { render json: @internship_position.errors, status: :unprocessable_entity }\n \t\tend\n \tend\n end",
"def new\n @polygon = Polygon.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @polygon }\n end\n end",
"def new\n @tinymap = Tinymap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @tinymap }\n end\n end",
"def new\n @chef_mapping = ChefMapping.new\n @source = []\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @chef_mapping }\n end\n end",
"def create\n position = Position.new(params.permit(:address))\n \n if position.save\n render json: position, status: :created\n else\n render json: position.errors, status: :unprocessable_entity\n end\n end",
"def create\n @location = Geolocation.new(params[:geolocation])\n @scene = Scene.new(:title => params[:title])\n @location.scenes << @scene\n @location.save\n @scene.save\n\n respond_to do |format|\n if @location.save\n format.html { redirect_to @location, notice: 'Location was successfully create.'}\n format.json { render json: @location, status: :created, location: @location }\n else\n format.html { render action: \"new\"}\n format.json { render json: @location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n do_new_resource\n get_project_if_exists\n do_set_attributes\n do_authorize_instance\n\n # initialize lat/lng to Brisbane-ish\n @site.longitude = 152\n @site.latitude = -27\n respond_to do |format|\n format.html\n format.json { respond_new }\n end\n end",
"def new\n #@location_hash = params[:location]\n @name = params[:name]\n @longitude = params[:longitude]\n @latitude = params[:latitude]\n end",
"def new_location\n current_user.update_location(params[:longitude], params[:latitude])\n @room = Room.first\n respond_to do |format|\n format.js {render :nothing => true }\n end\n end",
"def create\n @position_mapper = PositionMapper.new(params[:position_mapper])\n\n respond_to do |format|\n if @position_mapper.save\n format.html { redirect_to @position_mapper, notice: 'Position mapper was successfully created.' }\n format.json { render json: @position_mapper, status: :created, location: @position_mapper }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position_mapper.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @map = Map.new\n @map.user = current_user\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @map }\n end\n end",
"def new\n @record = Location.new\n @networks = current_user.entity.networks\n @location_types = Location.location_types\n\n respond_to do |format|\n format.html {render :layout => 'popup'} \n format.json { \n response = {}\n render json: response \n }\n end\n end",
"def new\n @position_member = PositionMember.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @position_member }\n end\n end",
"def new\n @image_position_template = ImagePositionTemplate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @image_position_template }\n end\n end",
"def create\n @loc = current_user.locs.new(params[:loc])\n\n respond_to do |format|\n if @loc.save\n format.html { redirect_to @loc, notice: 'Loc was successfully created.' }\n format.json { render json: @loc, status: :created, location: @loc }\n else\n format.html { render action: \"new\" }\n format.json { render json: @loc.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @coordinate = Coordinate.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @coordinate }\n end\n end",
"def create\n @template_position = TemplatePosition.new(template_position_params)\n\n respond_to do |format|\n if @template_position.save\n format.html { redirect_to @template_position, notice: 'Template position was successfully created.' }\n format.json { render :show, status: :created, location: @template_position }\n else\n format.html { render :new }\n format.json { render json: @template_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def coordinates\n location = Location.new(location_params)\n if location.save\n render json: location\n else\n render json: \"ERROR\"\n end\n end",
"def new\n @finanzposition = Finanzposition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @finanzposition }\n end\n end",
"def new\n @localmap = Localmap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @localmap }\n end\n end",
"def new\n respond_to do |format|\n stage = Stage.new(:project_id => params[:project_id], :name => params[:name], :action => params[:action_name])\n begin\n stage.update_position(params[:position]) \n format.json { \n render json: stage\n }\n rescue\n format.json { \n render :json => {:errors => stage.errors}, :status => :unprocessable_entity \n }\n end\n end\n end",
"def new\n @admin_geonode = Admin::Geonode.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @admin_geonode }\n end\n end",
"def new\n @location = Location.new\n respond_to do |format|\n format.html\n format.xml { render :xml => @location }\n format.json { render :text => @location.to_json }\n end\n end",
"def new\n if params[:latitude] and params[:longitude]\n @spot = Spot.new(:latitude => params[:latitude], :longitude => params[:longitude])\n @zoom = 14\n else\n @spot = Spot.new(:latitude => Spot::DEFAULT_POSITION[:latitude], :longitude => Spot::DEFAULT_POSITION[:longitude])\n @zoom = Spot::DEFAULT_POSITION[:zoom]\n end\n\n respond_to do |format|\n format.js\n format.html # new.html.erb\n format.xml { render :xml => @spot }\n end\n end",
"def new\n @map = Map.find(params[:map_id])\n\n #@node = Node.new\n @node = @map.nodes.build\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @node }\n end\n end",
"def new\n #TODO\n @map = Map.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @map }\n end\n end",
"def create\n @curpg = :admintools\n @location = Location.new(params[:location])\n\n respond_to do |format|\n if @location.save\n format.html { redirect_to :controller => \"locations\", :action => \"index\" }\n format.json { render json: @location, status: :created, location: @location }\n else\n format.html { render action: \"new\" }\n format.json { render json: @location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @spawner = Spawner.new\n @fieldtrips = Fieldtrip.all\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @spawner }\n end\n end",
"def new\n r = Ring.instance\n @creature = Creature.new(latitude: r.latitude, longitude: r.longitude)\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @creature }\n end\n end",
"def create\n @position = @device.positions.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to device_positions_path(@device, @position), notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n\t @location = Location.new\n\t @months = Month.all\n\t @countries = Location.all.map(&:country).uniq\n\n\t respond_to do |format|\n\t format.html # new.html.erb\n\t format.json { render json: @location }\n\t end\n\t end",
"def new\n @smallmap = Smallmap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @smallmap }\n end\n end",
"def new\n @mini_map_road = MiniMapRoad.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @mini_map_road }\n end\n end",
"def new\n @locacao = Locacao.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render :json => @locacao }\n end\n end",
"def new\n @point = Point.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @point }\n end\n end",
"def new\n @gmap = Gmap.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @gmap }\n end\n end",
"def create\n @geocoded_location = GeocodedLocation.new(geocoded_location_params)\n\n respond_to do |format|\n if @geocoded_location.save\n format.html { redirect_to :root, notice: 'Geocoded location was successfully created.' }\n format.json { render :show, status: :created, location: @geocoded_location }\n else\n format.html { render :new }\n format.json { render json: @geocoded_location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @location = Location.new(\n name: location_params[:name],\n address_line_1: location_params[:lineOne],\n address_line_2: location_params[:lineTwo],\n lat: location_params[:lat],\n lng: location_params[:lng]\n )\n if @location.save\n render json: @location\n else\n render json: {message: 'creation failed'}\n end\n end",
"def create\n @user_position = UserPosition.new(params[:user_position])\n\n respond_to do |format|\n if @user_position.save\n format.html { redirect_to @user_position, notice: 'User position was successfully created.' }\n format.json { render json: @user_position, status: :created, location: @user_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n \n respond_to do |format|\n if @position.save\n format.html { redirect_to(@position, :notice => 'Position was successfully created.') }\n format.xml { render :xml => @position, :status => :created, :location => @position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @initial_marker = InitialMarker.new(marker_params)\n\n respond_to do |format|\n format.html { redirect_to new_marker_url(:lat => @initial_marker.lat,\n :lon => @initial_marker.lon), notice: 'Marker was successfully created.' }\n format.json { render :show, status: :created, location: @initial_marker }\n end\n end",
"def new\n @zone = Zone.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @zone }\n end\n end",
"def new\n place = Place.new\n\n render json: place\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if params[:session_new_button]\n @position.save\n Activity.create(content: \"#{@position.job_title}\", action: \"created\", office: \"#{@position.office}\", user_name: \"#{@position.user_name}\", link: \"#{@position.id}\")\n format.html { redirect_to edit_position_path(@position) }\n format.json { head :no_content }\n else\n if @position.save\n Activity.create(content: \"#{@position.job_title}\", action: \"created\", office: \"#{@position.office}\", user_name: \"#{@position.user_name}\", link: \"#{@position.id}\")\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def new\n @poblamiento_import_location = PoblamientoImportLocation.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @poblamiento_import_location }\n end\n end",
"def new\n if current_user.is_not_member?\n flash[:error] = \"You do not have permissions to access that feature.\"\n redirect_to root_path and return\n end\n \n @location_history = LocationHistory.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @location_history }\n end\n end",
"def new\n @place = Place.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @place }\n end\n end"
] | [
"0.7227565",
"0.707564",
"0.707564",
"0.707564",
"0.68282133",
"0.66331935",
"0.6611183",
"0.65948105",
"0.6525071",
"0.64507747",
"0.6448005",
"0.63941914",
"0.6375476",
"0.63684785",
"0.6366236",
"0.63476855",
"0.6338557",
"0.6329629",
"0.62621737",
"0.6220882",
"0.6209307",
"0.6201274",
"0.6198762",
"0.61847156",
"0.6180231",
"0.6164069",
"0.61630535",
"0.61555904",
"0.61555904",
"0.61555904",
"0.61555904",
"0.61555904",
"0.6148925",
"0.6145487",
"0.61440593",
"0.6134198",
"0.61243606",
"0.61117905",
"0.609928",
"0.60952276",
"0.6091541",
"0.6091541",
"0.609061",
"0.6089235",
"0.6067786",
"0.60630006",
"0.605984",
"0.605984",
"0.60594296",
"0.6048931",
"0.60416293",
"0.6039008",
"0.60367066",
"0.60134715",
"0.6007059",
"0.600524",
"0.5989565",
"0.5986424",
"0.59794766",
"0.5978744",
"0.59715176",
"0.59354085",
"0.5925805",
"0.5920974",
"0.5918613",
"0.59125835",
"0.5909324",
"0.59083426",
"0.5893583",
"0.58922523",
"0.58850926",
"0.5871657",
"0.5871002",
"0.58699673",
"0.5869439",
"0.58608276",
"0.5857672",
"0.5851794",
"0.5845533",
"0.5837759",
"0.58206624",
"0.5820368",
"0.58181465",
"0.5812572",
"0.58110297",
"0.5803174",
"0.5802993",
"0.580157",
"0.5798822",
"0.5797471",
"0.5794742",
"0.5786786",
"0.57841235",
"0.5782887",
"0.5779462",
"0.57737464",
"0.57690775",
"0.5766798",
"0.576141",
"0.5759146"
] | 0.76666987 | 0 |
POST /character/geo_positions POST /character/geo_positions.json | def create
@character_geo_position = Character::GeoPosition.new(params[:character_geo_position])
respond_to do |format|
if @character_geo_position.save
format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }
format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }
else
format.html { render action: "new" }
format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def correct_geo_coords\n coords = params.seek :geo, :geojson, :geometry, :coordinates\n if coords\n array = GeosHelper.geo_coords_to_array(coords)\n params[:geo][:geojson][:geometry][:coordinates] = array\n end\n end",
"def map_coordinates\n loc = find_or_create_geolocation(params)\n respond_to do |format|\n @geolocation_boxes = GeolocationBox.only_geo_bbox(resource.id)\n @geolocation_box = loc.geolocation_box\n format.js { render template: 'stash_datacite/geolocation_boxes/create.js.erb' }\n end\n end",
"def create\n begin\n @new_position = @@data_util.hash_data_to_upper_case(params[:position], ['description'])\n @new_position[:createdby] = session[:username]\n @new_position[:mypclient_id] = session[:client_id]\n\n @position = Position.new(@new_position)\n if @position.save\n @@request_result[:success] = true\n @@request_result[:notice] =\"Position was successfully created.\"\n else\n @@request_result[:errormsg] = @position.errors.full_messages[0]\n end\n rescue Exception => e\n @@request_result[:errormsg] = e.message\n end\n render json: @@request_result\n end",
"def new\n @character_geo_position = Character::GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def create\n @position = @device.positions.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to device_positions_path(@device, @position), notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n position = Position.new(params.permit(:address))\n \n if position.save\n render json: position, status: :created\n else\n render json: position.errors, status: :unprocessable_entity\n end\n end",
"def update\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n if @character_geo_position.update_attributes(params[:character_geo_position])\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geo = Geo.new(geo_params)\n\n respond_to do |format|\n if @geo.save\n format.html { redirect_to @geo, notice: 'Geo was successfully created.' }\n format.json { render :show, status: :created, location: @geo }\n else\n format.html { render :new }\n format.json { render json: @geo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def create\n @text_position = TextPosition.new(params[:text_position])\n\n respond_to do |format|\n if @text_position.save\n format.html { redirect_to @text_position, notice: 'Text position was successfully created.' }\n format.json { render json: @text_position, status: :created, location: @text_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @text_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @patient = Patient.new(patient_params)\n\n respond_to do |format|\n if @patient.save\n Position.create!(patient_id: @patient.id,\n latitude: Geocoder.coordinates(@patient.address)[0] ,\n longitude: Geocoder.coordinates(@patient.address)[1])\n format.html { redirect_to @patient, notice: 'Patient was successfully created.' }\n format.json { render :show, status: :created, location: @patient }\n else\n format.html { render :new }\n format.json { render json: @patient.errors, status: :unprocessable_entity }\n end\n end\n end",
"def coordinates\n location = Location.new(location_params)\n if location.save\n render json: location\n else\n render json: \"ERROR\"\n end\n end",
"def create\n @crew_position = CrewPosition.new(params[:crew_position])\n\n respond_to do |format|\n if @crew_position.save\n format.html { redirect_to @crew_position, :notice => 'Crew position was successfully created.' }\n format.json { render :json => @crew_position, :status => :created, :location => @crew_position }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @crew_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n \t@internship_position = InternshipPosition.new(internship_position_params)\n\n \trespond_to do |format|\n \t\tif @internship_position.save\n \t\t\tformat.html { redirect_to @internship_position, notice: 'Position was successfully created'}\n \t\t\tformat.json { render json: @internship_position, status: :created, location: @internship_position }\n \t\telse\n \t\t\tformat.html { render action: \"new\" }\n \t\t\tformat.json { render json: @internship_position.errors, status: :unprocessable_entity }\n \t\tend\n \tend\n end",
"def create\n @user_position = UserPosition.new(params[:user_position])\n\n respond_to do |format|\n if @user_position.save\n format.html { redirect_to @user_position, notice: 'User position was successfully created.' }\n format.json { render json: @user_position, status: :created, location: @user_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @user_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n positions = JSON.parse(params['positions'])\n\n # Fill in body to initialize the game and return a 200 response\n\n render plain: \"OK\"\n end",
"def create\n @location = Geolocation.new(params[:geolocation])\n @scene = Scene.new(:title => params[:title])\n @location.scenes << @scene\n @location.save\n @scene.save\n\n respond_to do |format|\n if @location.save\n format.html { redirect_to @location, notice: 'Location was successfully create.'}\n format.json { render json: @location, status: :created, location: @location }\n else\n format.html { render action: \"new\"}\n format.json { render json: @location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @template_position = TemplatePosition.new(template_position_params)\n\n respond_to do |format|\n if @template_position.save\n format.html { redirect_to @template_position, notice: 'Template position was successfully created.' }\n format.json { render :show, status: :created, location: @template_position }\n else\n format.html { render :new }\n format.json { render json: @template_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position_mapper = PositionMapper.new(params[:position_mapper])\n\n respond_to do |format|\n if @position_mapper.save\n format.html { redirect_to @position_mapper, notice: 'Position mapper was successfully created.' }\n format.json { render json: @position_mapper, status: :created, location: @position_mapper }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position_mapper.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @positions = Position.all.order :id\n @geojson = Hash.new\n\n @points = Position.geopoints\n @geojson = {\n type: 'FeatureCollection',\n features: @points\n }\n\n respond_to do |format|\n format.html\n format.json { render json: @geojson } # respond with the created JSON object\n end\n end",
"def create\n @post = Post.new(post_params)\n @post.is_open = true\n @post.is_solved = false\n\n #if post_params[:include_location]==\"1\"\n # @post.city=\"Santiago\"\n # @post.country=\"Chile\"\n # puts(\"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nBieeen\\n\\n\\n\\n\\n\\n\\n\\n\")\n # puts(params[:location][\"latitude\"])\n @post.gps_location=\"Latitude: #{params[:location][\"latitude\"]} | Longitude: #{params[:location][\"longitude\"]}\"\n location = Location.create(name: \"wiwi\",latitude: params[:location][\"latitude\"], longitude: params[:location][\"longitude\"])\n @post.location=location\n #end\n\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to root_path, notice: 'Post was successfully created.' }\n format.json { render :show, status: :created, location: @post }\n else\n # format.html { render :new }\n format.html { redirect_to root_path, alert: 'Error creating post. Recalls that both the length of the title and the content of the post must be at least 5 characters long.' }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n \n respond_to do |format|\n if @position.save\n format.html { redirect_to(@position, :notice => 'Position was successfully created.') }\n format.xml { render :xml => @position, :status => :created, :location => @position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @unko_position = UnkoPosition.new(params[:unko_position])\n\n # 既に同じデバイスからの入力がないか確認\n @previous_unko_position = UnkoPosition.find_by_device_id(@unko_position.device_id)\n @previous_unko_position.delete if !@previous_unko_position.nil?\n\n respond_to do |format|\n if @unko_position.save\n format.html { redirect_to @unko_position, notice: 'Unko position was successfully created.' }\n format.json { render json: @unko_position, status: :created, location: @unko_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @unko_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @lost_pet = LostPet.new(params[:lost_pet])\n @lost_pet.coordinate = ParseGeoPoint.new :latitude => params[:latitude].to_f, :longitude => params[:longitude].to_f\n respond_to do |format|\n if @lost_pet.save\n format.html { redirect_to @lost_pet, notice: 'Lost pet was successfully created.' }\n format.json { render json: @lost_pet, status: :created, location: @lost_pet }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lost_pet.errors, status: :unprocessable_entity }\n end\n end\n end",
"def position_params\n json_params = ActionController::Parameters.new(JSON.parse(request.body.read))\n json_params.require(:position).permit(:address, :longitude, :latitude)\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to(admin_position_path(@position), :notice => 'Position was successfully created.') }\n format.xml { render :xml => @position, :status => :created, :location => @position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @entried_position = EntriedPosition.new(entried_position_params)\n\n respond_to do |format|\n if @entried_position.save\n format.html { redirect_to @entried_position, notice: 'Entried position was successfully created.' }\n format.json { render action: 'show', status: :created, location: @entried_position }\n else\n format.html { render action: 'new' }\n format.json { render json: @entried_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_positions\n create_player_position\n create_target_position\n create_select_position\n create_object_position\n end",
"def create\n @post_geos = params[:post_geos].split(\"\\n\").map{|line| line.blank? ? nil : line.strip}.compact\n current_user.settings.post_geos = @post_geos\n\n respond_to do |format|\n format.html { redirect_to [:client,:post_geos], notice: 'Post category was successfully created.' }\n format.json { render json: @post_geo, status: :created, location: @post_geo }\n end\n end",
"def create\n puts \"WORK IT GURL\"\n @question = Question.new(params[:question])\n @question.user_id = current_user.id\n geocode = Geocoder.coordinates(@question.location)\n @question.lat = geocode[0].to_s\n @question.lng = geocode[1].to_s\n respond_to do |format|\n if @question.save\n @question.insert_location(@question.lat + ', ' + @question.lng)\n format.html { redirect_to @question, notice: 'Question was successfully created.' }\n format.json { render json: @question, status: :created, location: @question }\n else\n format.html { render action: \"new\" }\n format.json { render json: @question.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = User.create(position_params)\n render json: @position, status: :accepted\n end",
"def create\n @post_code = PostCode.new(params[:post_code])\n @towns = Town.find :all \n @geo_positions = GeoPosition.find :all\n \n respond_to do |format|\n if @post_code.save\n flash[:notice] = 'PostCode was successfully created.'\n format.html { redirect_to(@post_code) }\n format.xml { render :xml => @post_code, :status => :created, :location => @post_code }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @post_code.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n\n has = params[\"area\"].to_json\n data_has = JSON.parse(has)\n @zone = Zone.new();\n @zone.nombre_zona = params[\"nombre_zona\"]\n @zone.color = params[\"color\"]\n\n respond_to do |format|\n if @zone.save\n \n data_has.each do |geo|\n @coordenada = CoordinateZone.new()\n geo.each do |data|\n @coordenada.zone_id = @zone.id\n @coordenada.latitud = data[\"lat\"].to_f\n @coordenada.longitud = data[\"lng\"].to_f \n end\n @coordenada.save\n end\n\n format.html { redirect_to @zone, notice: 'Zone was successfully created.' }\n format.js \n # format.js { render js: \"window.location.href=#{ directories_path }\" }\n format.json { render :show, status: :created, location: @zone }\n \n else\n format.html { render :new }\n format.json { render json: @zone.errors, status: :unprocessable_entity }\n end\n\n end\n end",
"def create\n respond_to do |format|\n if position.save\n format.html { redirect_to( position, flash: { success: 'Position created.' } ) }\n format.xml { render xml: position, status: :created, location: position }\n else\n format.html { render action: \"new\" }\n format.xml { render xml: position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to positions_path(anchor: \"row-#{@position.id}\"), notice: 'Position was successfully created.' }\n else\n format.html { render :new }\n end\n end\n end",
"def create\n @order_position = OrderPosition.new(order_position_params)\n\n respond_to do |format|\n if @order_position.save\n format.html { redirect_to @order_position, notice: 'Order position was successfully created.' }\n format.json { render :show, status: :created, location: @order_position }\n else\n format.html { render :new }\n format.json { render json: @order_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def geolocation_params\n params.require(:geolocation).permit(:address, :latitude, :longitude, :borough, :neighborhood, :created_at, :updated_at)\n end",
"def create\n @admin_position = Position.new(admin_position_params)\n\n respond_to do |format|\n if @admin_position.save\n format.html { redirect_to admin_positions_path, notice: 'Position was successfully created.' }\n else\n format.html { render :new }\n end\n end\n end",
"def create\n @proposal_position = ProposalPosition.new(proposal_position_params)\n\n respond_to do |format|\n if @proposal_position.save\n format.html { redirect_to @proposal_position, notice: 'Proposal position was successfully created.' }\n format.json { render :show, status: :created, location: @proposal_position }\n else\n format.html { render :new }\n format.json { render json: @proposal_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geocoded_location = GeocodedLocation.new(geocoded_location_params)\n\n respond_to do |format|\n if @geocoded_location.save\n format.html { redirect_to :root, notice: 'Geocoded location was successfully created.' }\n format.json { render :show, status: :created, location: @geocoded_location }\n else\n format.html { render :new }\n format.json { render json: @geocoded_location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @managers_position = Managers::Position.new(managers_position_params)\n\n respond_to do |format|\n if @managers_position.save\n format.html { redirect_to @managers_position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @managers_position }\n else\n format.html { render :new }\n format.json { render json: @managers_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @place = Place.new(params[:place])\n @place.apply_geo(params[:coordinates])\n \n respond_to do |format|\n if @place.save\n format.html { redirect_to @place, notice: I18n.t('views.messages.places.notifications.create') }\n format.json { render json: @place, status: :created, location: @place }\n else\n format.html { render action: \"new\" }\n format.json { render json: @place.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position_state = PositionState.new(position_state_params)\n\n respond_to do |format|\n if @position_state.save\n format.html { redirect_to @position_state, notice: 'Position state was successfully created.' }\n format.json { render :show, status: :created, location: @position_state }\n else\n format.html { render :new }\n format.json { render json: @position_state.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n if geo_params\n point = @user.geopoints.build\n point[:lat] = @geopoint[:lat]\n point[:lon] = @geopoint[:lon]\n point[:battery] = @geopoint[:battery]\n point[:time] = @geopoint[:time]\n if point.save\n @user.update(lat: point[:lat], lon: point[:lon], time: point[:time])\n #geopoint saved successfuly\n render json: \"ok\", status: 200\n else\n #validate error\n render json: \"validate_error\", status: 422\n end\n else\n #NotFound user\n render json: \"empty_error\", status: 423\n end\nend",
"def create\n @position = Position.new(params[:position])\n @categories = Category.find(:all)\n\n respond_to do |format|\n if @position.save\n flash[:notice] = 'Position was successfully created.'\n format.html { redirect_to(@position) }\n format.xml { render :xml => @position, :status => :created, :location => @position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @part_position = PartPosition.new(part_position_params)\n\n respond_to do |format|\n if @part_position.save\n format.html { redirect_to @part_position, notice: 'Part position was successfully created.' }\n format.json { render :show, status: :created, location: @part_position }\n else\n format.html { render :new }\n format.json { render json: @part_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position = Position.new(params[:position])\n\n respond_to do |format|\n if params[:session_new_button]\n @position.save\n Activity.create(content: \"#{@position.job_title}\", action: \"created\", office: \"#{@position.office}\", user_name: \"#{@position.user_name}\", link: \"#{@position.id}\")\n format.html { redirect_to edit_position_path(@position) }\n format.json { head :no_content }\n else\n if @position.save\n Activity.create(content: \"#{@position.job_title}\", action: \"created\", office: \"#{@position.office}\", user_name: \"#{@position.user_name}\", link: \"#{@position.id}\")\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def create\n @geo_datum = GeoDatum.new(geo_datum_params)\n\n respond_to do |format|\n if @geo_datum.save\n format.html { redirect_to @geo_datum, notice: 'Geo datum was successfully created.' }\n format.json { render :show, status: :created, location: @geo_datum }\n else\n format.html { render :new }\n format.json { render json: @geo_datum.errors, status: :unprocessable_entity }\n end\n end\n end",
"def postLocation( location_id, type, country, language, name, formal_name, resolution, population, description, timezone, latitude, longitude, parent_town, parent_county, parent_province, parent_region, parent_neighbourhood, parent_district, postalcode, searchable_id, searchable_ids)\n params = Hash.new\n params['location_id'] = location_id\n params['type'] = type\n params['country'] = country\n params['language'] = language\n params['name'] = name\n params['formal_name'] = formal_name\n params['resolution'] = resolution\n params['population'] = population\n params['description'] = description\n params['timezone'] = timezone\n params['latitude'] = latitude\n params['longitude'] = longitude\n params['parent_town'] = parent_town\n params['parent_county'] = parent_county\n params['parent_province'] = parent_province\n params['parent_region'] = parent_region\n params['parent_neighbourhood'] = parent_neighbourhood\n params['parent_district'] = parent_district\n params['postalcode'] = postalcode\n params['searchable_id'] = searchable_id\n params['searchable_ids'] = searchable_ids\n return doCurl(\"post\",\"/location\",params)\n end",
"def create\n @contest_position = ContestPosition.new(contest_position_params)\n\n respond_to do |format|\n if @contest_position.save\n format.html { redirect_to @contest_position, notice: 'Contest position was successfully created.' }\n format.json { render action: 'show', status: :created, location: @contest_position }\n else\n format.html { render action: 'new' }\n format.json { render json: @contest_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def active_positions\n authenticated_post(\"auth/positions\").body\n end",
"def move\n # TODO: Check validity of params, they can be spoofed\n # (And log cheaters.)\n\n # TODO: All the following and more\n # Validate acceptable movement here\n #if acceptable_move == true\n if true == true\n @x = (@character.xloc = @character.xloc + params[:x].to_i)\n @y = (@character.yloc = @character.yloc + params[:y].to_i)\n @rad = @character.view\n @character.view = 7\n @character.save\n end\n\n # FUTURE: Move companions\n # Companions/etc\n\n # TODO: Delays based on terrain\n\n\n @world_map = WorldMap.new\n #render 'relocate'\n #render :template => 'relocate.js.erb'\n respond_to do |format|\n format.js { render :layout => false }\n end\n\n\n #@world_map = WorldMap.find(params[:id])\n #\n #respond_to do |format|\n # if @world_map.update_attributes(params[:world_map])\n # format.html { redirect_to @world_map, notice: 'World map was successfully updated.' }\n # format.json { head :no_content }\n # else\n # format.html { render action: \"edit\" }\n # format.json { render json: @world_map.errors, status: :unprocessable_entity }\n # end\n #end\n end",
"def create\n @gig = current_user.gigs.new(gig_params)\n location = Location.find_or_create_by({\n postcode_address: params[:location][:postcode_address],\n street_address: params[:location][:street_address]\n })\n @gig.location = location\n respond_to do |format|\n if @gig.save\n format.html { redirect_to @gig, notice: 'Gig was successfully created.' }\n format.json { render :show, status: :created, location: @gig }\n else\n format.html { render :new }\n format.json { render json: @gig.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @post = Post.new(post_params)\n @post.user_id=params[:post][:user_id]\n @post.save\n \n\n \n @point = ['0', '0', '0', '0', '0']\n \n for i in 0..4\n @point[i] = Point.new\n end\n \n \n for i in 0..4\n @latitude = \"latitude\" + i.to_s\n @longitude = \"longitude\" + i.to_s\n @point_title = \"point_title\" + i.to_s\n @content = \"content\" + i.to_s\n @cardimg = \"cardimg\" + i.to_s\n \n if(@longitude)\n \n @point[i].latitude = params[:post][@latitude]\n @point[i].longitude = params[:post][@longitude]\n @point[i].point_title = params[:post][@point_title]\n @point[i].content = params[:post][@content]\n @point[i].cardimg = params[:post][@cardimg]\n @point[i].post_id = @post.id\n @point[i].save\n \n else\n \n end\n \n end\n\n respond_to do |format|\n if @post.save\n format.html { redirect_to @post, notice: 'Post was successfully created.' }\n format.json { render :show, status: :created, location: @post }\n else\n format.html { render :new }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n \n \n end\n end\n end",
"def position_params\n params.require(:position).permit(:name)\n end",
"def position_params\n params.require(:position).permit(:name)\n end",
"def postEntityGeopoint( entity_id, longitude, latitude, accuracy)\n params = Hash.new\n params['entity_id'] = entity_id\n params['longitude'] = longitude\n params['latitude'] = latitude\n params['accuracy'] = accuracy\n return doCurl(\"post\",\"/entity/geopoint\",params)\n end",
"def entried_position_params\n params.require(:entried_position).permit(:entry_id, :latitude, :longitude, :label, :owner_id)\n end",
"def create\n \n @user = User.find(params[:user_id])\n @position = @user.positions.build(params[:position])\n\n respond_to do |format|\n if @position.save\n flash[:notice] = 'Position was successfully created.'\n format.html { redirect_to(@user) }\n format.xml { render :xml => @position, :status => :created, :location => @position }\n else\n @committees = Committee.all(:order => :code)\n @roles = []\n if !params[:position][:committee_id].blank?\n @roles = Role.all(:condition => [\"committee_id = ?\", params[:position][:committee_id]])\n end\n\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @position = @portfolio.positions.build(position_params)\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to portfolio_positions_path(@portfolio), notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @map = Map.new(params[:map])\n @map.name = \"Map Created #{Time.now.strftime('at %I:%M%p')}\" if @map.name.blank?\n @map.description = \"No Description Provided\" if @map.description.blank?\n if points = params['markers']\n points.each do |marker_identifier, point|\n @map.points.build(:lat => point['lat'].to_f,:lng => point['lng'].to_f,:description =>point['description'], :marker_identifier => marker_identifier)\n end\n end\n respond_to do |format|\n if @map.save\n format.html { redirect_to(@map, :notice => 'Map was successfully created.') }\n else\n format.html { render :action => \"new\" }\n end\n end\n end",
"def create\n @coord = @member.build_coord(coord_params)\n \n respond_to do |format|\n if @coord.save\n format.html { redirect_to new_member_adhesion_url(@member), notice: 'Coordonnées enregistrées' }\n format.json { render json: @coord, status: :created, location: @coord }\n else\n format.html { render action: \"new\" }\n format.json { render json: @coord.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @itinerary = Itinerary.new(itinerary_params)\n @start_loc = Location.create(address: params[:start_loc], is_origin: true, itinerary_id: @itinerary.id)\n @end_loc = Location.create(address: params[:end_loc], is_origin: false, itinerary_id: @itinerary.id)\n @itinerary.locations.push(@start_loc)\n @itinerary.locations.push(@end_loc)\n\n respond_to do |format|\n if @itinerary.save\n format.html { redirect_to @itinerary, notice: 'Itinerary was successfully created.' }\n format.json { render :show, status: :created, location: @itinerary }\n\n # origin = @itinerary.locations.new( address: params[:origin], is_origin: true )\n # origin.get_coords\n\n # destination = @itinerary.locations.new( address: params[:destination], is_origin: false )\n # destination.get_coords\n\n else\n format.html { render :new }\n format.json { render json: @itinerary.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @location = Location.new(\n name: location_params[:name],\n address_line_1: location_params[:lineOne],\n address_line_2: location_params[:lineTwo],\n lat: location_params[:lat],\n lng: location_params[:lng]\n )\n if @location.save\n render json: @location\n else\n render json: {message: 'creation failed'}\n end\n end",
"def map_pois\n restaurant_positions = RestaurantPositions.new(map_params)\n\n expose restaurant_positions.call,\n each_serializer: RestaurantPositionSerializer\n end",
"def create\n @executive_position = ExecutivePosition.new(params[:executive_position])\n\n respond_to do |format|\n if @executive_position.save\n flash[:notice] = 'ExecutivePosition was successfully created.'\n format.html { redirect_to(@executive_position) }\n format.xml { render :xml => @executive_position, :status => :created, :location => @executive_position }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @executive_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n map = Map.new;\n map.user_id = session[:user]['id']\n map.name = params['name']\n map.map_json = params['map_json']\n\n if map.save\n render json: {save: true, message: 'zapisano poprawnie mapę', callback: 'map'}\n else\n render json: {save: false, message: 'błąd podczas zapisu mapy', callback: 'map'}\n end\n end",
"def create\n @position = Position.new(position_params)\n respond_to do |format|\n if @position.save\n @position.create_images(params[:images]) if params[:images]\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @position }\n else\n format.html { render :new }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_position(pos)\n return nil if position_already_saved? pos\n positions = read_positions\n positions << pos.to_i\n p positions\n positions.sort!\n positions.join(\"\\n\")\n open(\"current_position.json\", \"w\") do |f|\n f.puts positions\n end\n true\nend",
"def create\n @hrms_position = Hrms::Position.new(hrms_position_params)\n\n respond_to do |format|\n if @hrms_position.save\n format.html { redirect_to @hrms_position, notice: 'Position was successfully created.' }\n format.json { render :show, status: :created, location: @hrms_position }\n else\n format.html { render :new }\n format.json { render json: @hrms_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_positions\n (1..GAME_BOARD_LENGTH).each do |y|\n (1..GAME_BOARD_LENGTH).each do |x|\n self.positions.new(x: x, y: y)\n end\n end\n set_ships\n save\n end",
"def create\n @place.user = current_user\n\n @select_markers = return_markers\n\n puts params[:map_marker]\n\n\n respond_to do |format|\n if @place.save\n Log.logit!(:places, :notice, \"User created place \" + @place.name, {:user_id => @current_user.id, :place_id => @place.id})\n format.html { redirect_to @place, notice: 'Place was successfully created.' }\n format.json { render json: @place, status: :created, location: @place }\n else\n format.html { render action: \"new\" }\n format.json { render json: @place.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n delete_keys = params.keys.select{|k| k =~ /^delete-/}\n direction = 'created'\n if delete_keys.empty?\n locations = Location.find_near(params[:x], params[:y])\n @location = locations.empty? ?\n Location.create(:x => params[:x], :y => params[:y]) :\n locations.first\n @ministry.locations << @location\n else\n logger.info delete_keys.first\n delete, x, y = delete_keys.first.split('-')\n logger.info \"X: #{x}, Y: #{y}\"\n locations = Location.find_near(x, y)\n locations.each{|l| @ministry.locations.delete(l)}\n direction = 'deleted'\n end\n\n respond_to do |format|\n if @ministry.save\n flash[:notice] = \"Ministry location was successfully #{direction}.\"\n format.html { redirect_to(edit_ministry_path(@ministry)) }\n format.xml { render :xml => @ministry, :status => :created, :location => @ministry }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @ministry.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n names = params[:positions]\n event_volunteer_id = params[:event_volunteer_id]\n\n if names.is_a?(Array) && names.length < 4 && event_volunteer_id.present?\n result_status = true\n result = []\n success_positions = []\n each_index = 0\n names.each_with_index do |position, index|\n each_index = index\n if result_status\n begin\n EventVolPosition.transaction do\n evp = EventVolPosition.create!(name: position, event_volunteer_id: event_volunteer_id)\n result << \"#{position}!\"\n success_positions << {name: position, id: evp.id}\n end\n rescue Exception => ex\n result_status = false\n result << ex.message\n end\n else\n break\n end\n end\n if result_status\n result = [true, '提交成功', success_positions]\n else\n if each_index > 0\n result = [true, (result[0..-2]+['剩余职位添加失败']).join(','), success_positions]\n else\n result = [false, result[0]]\n end\n end\n else\n result = [false, '参数不完整']\n end\n\n respond_to do |format|\n format.json { render json: {status: result[0], message: result[1], success_positions: result[2]} }\n if result[0]\n format.html { redirect_to [:admin, @event_vol_position], notice: @event_vol_position.name+'创建成功' }\n\n else\n format.html { render action: 'new' }\n end\n end\n end",
"def create\n @administrative_division = AdministrativeDivision.new(params[:administrative_division])\n @countries = Country.find :all\n @geo_positions = GeoPosition.find :all\n respond_to do |format|\n if @administrative_division.save\n flash[:notice] = 'AdministrativeDivision was successfully created.'\n format.html { redirect_to(@administrative_division) }\n format.xml { render :xml => @administrative_division, :status => :created, :location => @administrative_division }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @administrative_division.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @input = Input.new(input_params)\n\n respond_to do |format|\n if @input.save\n @file = @input.input_file.download\n data = CSV.parse(@file, headers: true)\n # TODO S in SOLID\n coords_ary = []\n data.each do |i|\n # iterate each row (address), call geolocate api for each\n # to get lat-lon coords\n i = i.to_h\n zip = i[\"Zipcode\"]\n addr = i[\"Address\"]\n town = i[\"Town\"]\n state = i[\"State\"]\n # POST or GET\n response = RestClient.get \"https://app.geocodeapi.io/api/v1/search?apikey=#{ENV['GEOLOCATION_KEY']}&text=#{addr},#{town},#{state},#{zip},United States\"\n parsed = JSON.parse(response)\n coords = parsed[\"features\"][0][\"geometry\"][\"coordinates\"].reverse\n puts \"coordinates for #{addr}, #{town} #{state}, #{zip}\"\n puts coords\n coords_ary << coords\n File.write('response.json', parsed)\n end\n osrm_api(coords_ary)\n format.html { redirect_to @input, notice: 'Input was successfully created.' }\n format.json { render :show, status: :created, location: @input }\n else\n format.html { render :new }\n format.json { render json: @input.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @department_position = DepartmentPosition.new(department_position_params)\n\n respond_to do |format|\n if @department_position.save\n format.html { redirect_to @department_position, notice: 'Department position was successfully created.' }\n format.json { render :show, status: :created, location: @department_position }\n else\n format.html { render :new }\n format.json { render json: @department_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n geos = params[:post_geos].split(\"\\n\").map{|line| line.blank? ? nil : line.strip}.compact\n current_user.settings.post_geos = geos\n\n respond_to do |format|\n format.html { redirect_to client_post_geos_path, notice: 'Post geos was successfully updated.' }\n format.json { head :no_content }\n end\n end",
"def create\n @floor = Floor.new(floor_params)\n\n respond_to do |format|\n if @floor.save\n editorData = @floor.editor_data\n editorData = editorData.split(\"<END>\").map{|i| i.split(\"<NEXT>\")}\n editorData.each do |obj|\n if obj[0] == 'submain'\n object = Room.new(name: obj[1],\n description: obj[2],\n capacity: obj[3],\n computers: obj[4],\n roomtype_id: Roomtype.all.where(id: obj[5].to_i)[0],\n floor_id: @floor.id)\n object.save\n elsif obj[0] == 'main'\n object = @floor\n end\n polygon = Polygon.new(imageable: object)\n polygon.save\n points = obj[6].split(\" \").map{|i| i.split(\",\")}\n for i in 0 ... points.size\n point = Point.create(ox: points[i][0].to_f,\n oy: points[i][1].to_f,\n priority: i,\n polygon: polygon\n )\n point.save\n end\n end\n\n format.html { redirect_to @floor, notice: t('flash.floor.create') }\n format.json { render :show, status: :created, location: @floor }\n else\n format.html { render :new }\n format.json { render json: @floor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @geometry = Geometry.new(geometry_params)\n\n respond_to do |format|\n if @geometry.save\n format.html { redirect_to @geometry, notice: 'Geometry was successfully created.' }\n format.json { render action: 'show', status: :created, location: @geometry }\n else\n format.html { render action: 'new' }\n format.json { render json: @geometry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @patient.update(patient_params)\n @patient.positions.first.update(\n latitude: Geocoder.coordinates(@patient.address)[0] ,\n longitude: Geocoder.coordinates(@patient.address)[1])\n format.html { redirect_to @patient, notice: 'Patient was successfully updated.' }\n format.json { render :show, status: :ok, location: @patient }\n else\n format.html { render :edit }\n format.json { render json: @patient.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_fake_pos( turn, prof, fake_positions_code_names )\n fake_positions_code_names << prof.current_location_code_name\n @data[ turn ] ||= {}\n @data[ turn ][ :fake_pos ] ||= fake_positions_code_names\n @data[ turn ][ :fake_pos ] = fake_positions_code_names & @data[ turn ][ :fake_pos ]\n validate_fake_pos_with_previous_turn_data( turn )\n @cached_data = nil\n end",
"def show\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def create\n location = GeoIP.new('lib/GeoLiteCity.dat').city(current_user.current_sign_in_ip)\n # location = GeoIP.new('lib/GeoLiteCity.dat').city('110.136.133.185')\n idea_params[:lat].blank? ? idea_params[:lat] << location.latitude.to_s : idea_params[:lat]\n idea_params[:long].blank? ? idea_params[:long] << location.longitude.to_s : idea_params[:long]\n idea_params[:region_name].blank? ? idea_params[:region_name] << location.region_name : idea_params[:region_name]\n idea_params[:country].blank? ? idea_params[:country] << location.country_name : idea_params[:country]\n idea_params[:city].blank? ? idea_params[:city] << location.city_name : idea_params[:city]\n @idea = Idea.new(idea_params)\n\n respond_to do |format|\n if @idea.save\n format.html { redirect_to @idea, notice: 'Idea was successfully created.' }\n format.json { render action: 'show', status: :created, location: @idea }\n else\n format.html { render action: 'new' }\n format.json { render json: @idea.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @position_member = PositionMember.new(params[:position_member])\n\n respond_to do |format|\n if @position_member.save\n @user = User.find(@position_member.user_id)\n @user.access_level = 30\n @user.save!\n format.html { redirect_to(@position_member, :notice => 'Position member was successfully created.') }\n format.xml { render :xml => @position_member, :status => :created, :location => @position_member }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @position_member.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @post = @user.posts.create(post_params)\n params[:post][:locations_attributes].each do |location|\n location = Location.where(address: location[1][:address], lat: location[1][:lat], lng: location[1][:lng]).first_or_initialize\n location.save\n @post.pins.new(location_id: location.id).save\n end\n if @post.save\n flash[:notice] = \"Post was successfully created.\"\n redirect_to [@post.user, @post]\n else\n render :new\n end\n end",
"def create \n\t\trobot_position = RobotMovement.new(check_robot,robots_commands).robot_movement\n\t\trender json: { location: robot_position }\n\tend",
"def create\n super\n location = request.location\n @user.latitude = location.latitude\n @user.longitude = location.longitude\n @user.save\n end",
"def create\n @support = find_support\n puts @position.inspect\n @position.support = @support unless @support.nil?\n\n respond_to do |format|\n if @position.save\n format.html { redirect_to @position, notice: 'Position was successfully created.' }\n format.json { render json: @position, status: :created, location: @position }\n else\n @support ||= @position.support\n klass = @position.support_type.nil? ? AccountUnit : @position.support_type.constantize\n @supports = klass.all\n format.html { render action: \"new\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def post_in_geofence(lat_coordinate, lng_coordinate) # coordinates are strings\n if !geofence_id.nil?\n require 'net/http'\n require 'json'\n begin\n uri = URI('https://api.fencer.io/v1.0/position/inside/' + Geofence.find(geofence_id).key)\n http = Net::HTTP.new(uri.host, uri.port)\n http.use_ssl = true\n headers = {'Authorization' => '5195ba22-ab3c-52f6-bab9-d15ff7b8794e', 'Lat-Pos' => lat_coordinate.to_s, 'Lng-Pos' => lng_coordinate.to_s}\n req = Net::HTTP::Get.new(uri.path, headers)\n res = http.request(req)\n resp = res.body\n resp_in_js = JSON.parse(resp)\n answer = resp_in_js[\"data\"][\"inside\"]\n end\n else\n true\n end\n end",
"def create\n @positioncoefficient = Positioncoefficient.new(params[:positioncoefficient])\n\n respond_to do |format|\n if @positioncoefficient.save\n format.html { redirect_to(@positioncoefficient, :notice => 'Positioncoefficient was successfully created.') }\n format.xml { render :xml => @positioncoefficient, :status => :created, :location => @positioncoefficient }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @positioncoefficient.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n Search.create(search_params)\n\n render json: JSON.parse(get_job_positions.body)\n end",
"def geo_params\n params.require(:geo).permit(:country, :expectancy)\n end",
"def addPoints coordinates\n coordinates.split(',').each_with_index do |coordinate, index|\n border_points.build(:latitude => coordinate[0..coordinate.index(' ')-1], :longitude => coordinate[coordinate.index(' ')+1 .. coordinate.length-1], :local_index => index)\n end\n end",
"def create\n @part_position = PartPosition.new(part_position_params)\n # authorize(@part_position)\n\n respond_to do |format|\n if @part_position.save\n format.html { redirect_to @part_position, notice: 'Part position was successfully created.' }\n format.json { render :show, status: :created, location: @part_position }\n else\n format.html { render :new }\n format.json { render json: @part_position.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.60359406",
"0.602981",
"0.5953773",
"0.5920963",
"0.5917561",
"0.5895335",
"0.5880196",
"0.5863509",
"0.5862524",
"0.5862524",
"0.5851815",
"0.58228683",
"0.581096",
"0.5748399",
"0.57393765",
"0.573566",
"0.5720172",
"0.5692822",
"0.5682632",
"0.56799394",
"0.5667723",
"0.56573933",
"0.56536853",
"0.56315166",
"0.5603347",
"0.5586931",
"0.5573444",
"0.5564778",
"0.55237234",
"0.55088985",
"0.5499389",
"0.549449",
"0.5493162",
"0.5481033",
"0.54805285",
"0.54784745",
"0.5474246",
"0.54723555",
"0.54491055",
"0.5444207",
"0.54152405",
"0.5413865",
"0.53738666",
"0.5362644",
"0.53133243",
"0.5293721",
"0.5289486",
"0.52876794",
"0.52792114",
"0.527842",
"0.5260349",
"0.52504885",
"0.5245415",
"0.52399945",
"0.5232689",
"0.52142566",
"0.52026266",
"0.5197149",
"0.51863843",
"0.5180961",
"0.51635706",
"0.5153056",
"0.514355",
"0.51341474",
"0.5126401",
"0.5104875",
"0.5104841",
"0.509942",
"0.50942975",
"0.50938797",
"0.5093388",
"0.5090886",
"0.507247",
"0.50494057",
"0.504851",
"0.50468886",
"0.5046719",
"0.5044497",
"0.5036412",
"0.5022417",
"0.5021342",
"0.50194144",
"0.5016307",
"0.5012443",
"0.50052065",
"0.50044894",
"0.49956128",
"0.4995535",
"0.49953577",
"0.49797663",
"0.49784732",
"0.4966177",
"0.49640945",
"0.49625745",
"0.4962516",
"0.495905",
"0.49559018",
"0.49399275",
"0.4934526",
"0.4921213"
] | 0.73566043 | 0 |
PUT /character/geo_positions/1 PUT /character/geo_positions/1.json | def update
@character_geo_position = Character::GeoPosition.find(params[:id])
respond_to do |format|
if @character_geo_position.update_attributes(params[:character_geo_position])
format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully updated.' }
format.json { head :ok }
else
format.html { render action: "edit" }
format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }
end
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create\n @character_geo_position = Character::GeoPosition.new(params[:character_geo_position])\n\n respond_to do |format|\n if @character_geo_position.save\n format.html { redirect_to @character_geo_position, notice: 'Geo position was successfully created.' }\n format.json { render json: @character_geo_position, status: :created, location: @character_geo_position }\n else\n format.html { render action: \"new\" }\n format.json { render json: @character_geo_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @patient.update(patient_params)\n @patient.positions.first.update(\n latitude: Geocoder.coordinates(@patient.address)[0] ,\n longitude: Geocoder.coordinates(@patient.address)[1])\n format.html { redirect_to @patient, notice: 'Patient was successfully updated.' }\n format.json { render :show, status: :ok, location: @patient }\n else\n format.html { render :edit }\n format.json { render json: @patient.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def destroy\n @character_geo_position = Character::GeoPosition.find(params[:id])\n @character_geo_position.destroy\n\n respond_to do |format|\n format.html { redirect_to character_geo_positions_url }\n format.json { head :ok }\n end\n end",
"def update\n @position = Position.find(params[:id])\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @position }\n else\n format.html { render :edit }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @position }\n else\n format.html { render :edit }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to @position, notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @position }\n else\n format.html { render :edit }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @unko_position = UnkoPosition.find(params[:id])\n\n respond_to do |format|\n if @unko_position.update_attributes(params[:unko_position])\n format.html { redirect_to @unko_position, notice: 'Unko position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @unko_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n respond_to do |format|\n if @location.update_attribute(params[:geolocation])\n format.html { redirect_to @location, notice: 'Location was successfully updated.'}\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @location.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_location(params)\n @client.put(\"#{path}/location\", nil, params, \"Content-Type\" => \"application/json\")\n end",
"def correct_geo_coords\n coords = params.seek :geo, :geojson, :geometry, :coordinates\n if coords\n array = GeosHelper.geo_coords_to_array(coords)\n params[:geo][:geojson][:geometry][:coordinates] = array\n end\n end",
"def update\n begin\n @position = Position.find(params[:id]);\n @updated_position = @@data_util.hash_data_to_upper_case(params[:position], ['description'])\n @updated_position[:lastupdateby] = session[:username]\n\n if @position.update_attributes(@updated_position)\n @@request_result[:success] = true\n @@request_result[:notice] =\"Position was successfully updated.\"\n else\n @@request_result[:errormsg] = @position.errors.full_messages[0]\n end\n rescue Exception => e\n @@request_result[:errormsg] = e.message\n end\n render json: @@request_result\n end",
"def update\n respond_to do |format|\n if @geo.update(geo_params)\n format.html { redirect_to @geo, notice: 'Geo was successfully updated.' }\n format.json { render :show, status: :ok, location: @geo }\n else\n format.html { render :edit }\n format.json { render json: @geo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @character.update(character_params)\n render json: @character, status: 201, location: @character\n else\n render json: @character.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to device_positions_path(@position.device), notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @position }\n else\n format.html { render :edit }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n \t@internship_position = InternshipPosition.find(params[:id])\n\n \trespond_to do |format|\n \t\tif @internship_position.update_attributes(internship_position_params)\n \t\t\tformat.html { redirect_to @internship_position, notice: 'Posiiton was successfully updated.' }\n \t\t\tformat.json { head :no_content }\n \t\telse\n \t\t\tformat.html { render action: \"edit\" }\n \t\t\tformat.json { render json: @internship_position.errors, status: :unprocessable_entity }\n \t\tend\n \tend\n end",
"def update\n @character = Character.get(params[:id])\n params[:character] = params[:character].map { |x,y| [x, y.empty? ? nil : y ] }.to_hash\n\n respond_to do |format|\n if @character.update(params[:character])\n format.html { redirect_to([:administration, @character], :notice => 'Character was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @character.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @user_position = UserPosition.find(params[:id])\n\n respond_to do |format|\n if @user_position.update_attributes(params[:user_position])\n format.html { redirect_to @user_position, notice: 'User position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @user_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @crew_position = CrewPosition.find(params[:id])\n\n respond_to do |format|\n if @crew_position.update_attributes(params[:crew_position])\n format.html { redirect_to @crew_position, :notice => 'Crew position was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.json { render :json => @crew_position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if position.save\n format.html { redirect_to( position, flash: { success: 'Position updated.' } ) }\n format.xml { head :ok }\n else\n format.html { render action: \"edit\" }\n format.xml { render xml: position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n geos = params[:post_geos].split(\"\\n\").map{|line| line.blank? ? nil : line.strip}.compact\n current_user.settings.post_geos = geos\n\n respond_to do |format|\n format.html { redirect_to client_post_geos_path, notice: 'Post geos was successfully updated.' }\n format.json { head :no_content }\n end\n end",
"def update_location\n suggestion = Suggestion.find(params[:id])\n if admin or suggestion.user == @current_user\n suggestion.lat = params[:lat]\n suggestion.lon = params[:lon]\n suggestion.save\n end\n render :text => suggestion.to_json\n end",
"def update\n @position = Position.find(params[:id])\n \n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to(@position, :notice => 'Position was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @text_position = TextPosition.find(params[:id])\n\n respond_to do |format|\n if @text_position.update_attributes(params[:text_position])\n format.html { redirect_to @text_position, notice: 'Text position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @text_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n attrs = town_params\n attrs.delete(:coordinates)\n coordinates = eval(town_params[:coordinates] || '')\n\n if @town.update_attributes(attrs)\n @town.update(coordinates: coordinates)\n format.html { redirect_to @town, notice: 'Town was successfully updated.' }\n format.json { render :show, status: :ok, location: @town }\n else\n format.html { render :edit }\n format.json { render json: @town.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to positions_path, notice: 'Position was successfully updated.' }\n else\n format.html { render :edit }\n end\n end\n end",
"def update\n respond_to do |format|\n if @template_position.update(template_position_params)\n format.html { redirect_to @template_position, notice: 'Template position was successfully updated.' }\n format.json { render :show, status: :ok, location: @template_position }\n else\n format.html { render :edit }\n format.json { render json: @template_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n update_helper(@position, position_params)\n end",
"def update\n @place = Place.find(params[:id])\n @place.apply_geo(params[:coordinates])\n\n respond_to do |format|\n if @place.update_attributes(params[:place])\n format.html { redirect_to @place, notice: I18n.t('views.messages.places.notifications.update') }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @place.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @feature_id = args[:feature_id] if args.key?(:feature_id)\n @geocoding_address = args[:geocoding_address] if args.key?(:geocoding_address)\n @kg_mid = args[:kg_mid] if args.key?(:kg_mid)\n @position = args[:position] if args.key?(:position)\n @rect = args[:rect] if args.key?(:rect)\n end",
"def update\n character = Character.find params[:id]\n\n # Before creating a new posession, check if it exists already and if it does just update the X and Y values.\n\n # find the checkboxes from params accessory[id][]\n # inside this will be an array of checked accessories.\n # for each of these, create a new Posession\n @posessions = params[:accessory][:id]\n @posessions.each do |p|\n if p.present?\n @positions = params[:positions]\n @x_pos = 0\n @y_pos = 0\n\n @positions.each do |key, value|\n if key.include? p\n # they match, find the X and Y values.\n if key.include? \"x\"\n @x_pos = value\n elsif key.include? \"y\"\n @y_pos = value\n end\n end\n end\n\n # check if posession doesnt already exist.\n if character.posessions.exists?(accessory_id: p)\n # find the posession and remove it from the character\n oldposession = character.posessions.where(accessory_id: p)\n oldposession.each do |item|\n if @x_pos != \"30\" && @y_pos != \"500\"\n character.posessions.delete(item)\n # associate a new posession with the updated values.\n new_posession = Posession.create :accessory_id => p, :character_id => character.id, :x_pos => @x_pos, :y_pos => @y_pos\n character.posessions << new_posession\n end\n end\n # if the posession already existed, check if the values have been moved and if they havent then do not remove, nor create a new entry.\n else\n # associate a new posession with the updated values.\n new_posession = Posession.create :accessory_id => p, :character_id => character.id, :x_pos => @x_pos, :y_pos => @y_pos\n character.posessions << new_posession\n end\n\n\n end\n\n # change users coins based on accessories selected.\n @coins = params[:userCoins]\n @current_user.coins = @coins\n @current_user.save\n end\n\n character.update params_character\n redirect_to character\n end",
"def update_geo_places\n unless(params[:place] && params[:topic_id] && params[:location_id])\n render_error \"incorrect params\"\n return\n end\n\n @topic = Topic.find(params[:topic_id])\n if current_user.guardian.ensure_can_edit!(@topic)\n render status: :forbidden, json: false\n return\n end\n\n# passing in location_id might be redundant as with updates, place should already have location_id\n location_id = params[:location_id].to_s\n # below is a workaround check while I still have some places in db\n # that are arrays\n # binding.pry\n # if @topic.geo.places.class == Array\n unless (@topic.geo.places.class == Hash) && (@topic.geo.places['sorted_ids'])\n @topic.geo.places = {\n 'sorted_ids' => []\n }\n @topic.geo.save\n end\n\n unless @topic.geo.places['sorted_ids'].include? location_id\n @topic.geo.places['sorted_ids'].push location_id\n end\n\n place = JSON.parse params[:place]\n# place param is now passed as a string ( json.stringyfy client side)\n# - allowing jquery to process the payload resulted in nested arrays getting converted to hashes\n# http://stackoverflow.com/questions/25856959/parsing-json-with-jquery-turns-array-into-hash\n# with json parse above, probably don't need below check anymore\n place['longitude'] = place['longitude'].to_f\n place['latitude'] = place['latitude'].to_f\n place['location_id'] = location_id\n\n # should do some checks to prevent injection attacks\n @topic.geo.places[location_id] = place\n # topic_places = @topic.geo.places || []\n\n @topic.geo.save!\n return render_json_dump @topic.geo.as_json\n\n end",
"def update\n @position_mapper = PositionMapper.find(params[:id])\n\n respond_to do |format|\n if @position_mapper.update_attributes(params[:position_mapper])\n format.html { redirect_to @position_mapper, notice: 'Position mapper was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @position_mapper.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @geometry.update(geometry_params)\n format.html { redirect_to @geometry, notice: 'Geometry was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @geometry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n location = Location.find_or_create_by({\n postcode_address: params[:location][:postcode_address],\n street_address: params[:location][:street_address]\n })\n @gig.location = location\n respond_to do |format|\n if @gig.update(gig_params)\n format.html { redirect_to @gig, notice: 'Gig was successfully updated.' }\n format.json { render :show, status: :ok, location: @gig }\n else\n format.html { render :edit }\n format.json { render json: @gig.errors, status: :unprocessable_entity }\n end\n\n end\n end",
"def update_expert_location\n @exp = Expert.find(params[:expert_id])\n #@job = Job.find(params[:job_id])\n if @exp.update_attributes(:latitude => params[:lat], :longitude => params[:lon])\n render :json=> {:success => true, :lat => @exp.latitude, :lon => @exp.longitude}, :status=>200\n else\n render :json=> {:success => false}, :status=>204\n end \n end",
"def update\n @position = Position.find(params[:id])\n\n respond_to do |format|\n if @position.update_attributes(params[:position])\n format.html { redirect_to(admin_position_path, :notice => 'Position was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @entried_position.update(entried_position_params)\n format.html { redirect_to @entried_position, notice: 'Entried position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @entried_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @proposal_position.update(proposal_position_params)\n format.html { redirect_to @proposal_position, notice: 'Proposal position was successfully updated.' }\n format.json { render :show, status: :ok, location: @proposal_position }\n else\n format.html { render :edit }\n format.json { render json: @proposal_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if spot = SkateSpot.where(:id => params[:id]).first and info = parse_input(params)\n if info.has_key?(:alt_names)\n info[:alt_names].map! { |n| SpotName.find_or_create_by(:name => n, :spot => spot) }\n end\n\n if info[:name]\n info[:name] = SpotName.find_or_create_by(:name => info[:name], :spot => spot) \n end\n\n if info.has_key?(:obstacles)\n info[:obstacles].map! { |o|\n Obstacle.find_or_create_by(:type => ObstacleType.where(:name => o['type']).first, :geometry => o['geometry'], :spot => spot)\n }\n end\n\n render :json => { :status => (spot.update(info) == true ? :success : :unprocessable_entity) }\n else\n render :json => { :status => :unprocessable_entity }\n end\n\n rescue ActiveRecord::RecordNotSaved\n render :json => { :status => :unprocessable_entity }\n end",
"def update\n respond_to do |format|\n if @order_position.update(order_position_params)\n format.html { redirect_to @order_position, notice: 'Order position was successfully updated.' }\n format.json { render :show, status: :ok, location: @order_position }\n else\n format.html { render :edit }\n format.json { render json: @order_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_location (new_lonlat)\n @survivor.update_attribute(:lonlat, new_lonlat) \n end",
"def update\n geo = Geocoder.coordinates(@destination.name)\n @destination.lat = geo.first\n @destination.lng = geo.last\n \n respond_to do |format|\n if @destination.update(destination_params)\n format.html { redirect_to @destination, notice: 'Destination was successfully updated.' }\n format.json { render :show, status: :ok, location: @destination }\n else\n format.html { render :edit }\n format.json { render json: @destination.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @character = current_user.characters.find(params[:id])\n @assets = Asset.where(:company_id => current_user.company_id)\n # respond_to do |format|\n if @character.update_attributes(params[:character])\n # solr.update(:character)\n redirect_to @character, notice: 'Character was successfully updated.'\n # format.json { head :no_content }\n else\n render action: \"edit\"\n # format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n # end\n end",
"def update\n respond_to do |format|\n if @position_scene1.update(position_scene1_params)\n format.html { redirect_to @position_scene1, notice: 'Position scene1 was successfully updated.' }\n format.json { render :show, status: :ok, location: @position_scene1 }\n else\n format.html { render :edit }\n format.json { render json: @position_scene1.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @part_position.update(part_position_params)\n format.html { redirect_to @part_position, notice: 'Part position was successfully updated.' }\n format.json { render :show, status: :ok, location: @part_position }\n else\n format.html { render :edit }\n format.json { render json: @part_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @part_position.update(part_position_params)\n format.html { redirect_to @part_position, notice: 'Part position was successfully updated.' }\n format.json { render :show, status: :ok, location: @part_position }\n else\n format.html { render :edit }\n format.json { render json: @part_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n \tparams[:position][:existing_question_attributes] ||= {}\n \t\n @position = Position.find(params[:id])\n respond_to do |format|\n if @position.update_attributes(params[:position])\n flash[:notice] = 'Position was successfully updated.'\n format.html { redirect_to(@position) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @position.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n render json: Location.update(params[\"id\"], params[\"location\"])\n end",
"def update\n respond_to do |format|\n if @recipe.update(recipe_params)\n p = 0\n @recipe.directions.each do |direction|\n direction.position = p\n direction.save!\n p += 1\n end\n\n if @recipe.author.nil? && @current_user\n @recipe.author = @current_user\n @recipe.save\n end\n\n format.html { redirect_to @recipe, notice: 'Recipe was successfully updated.' }\n format.json { render :show, status: :ok, location: @recipe }\n else\n format.html { render :edit }\n format.json { render json: @recipe.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @body_style_product_feature = BodyStyleProductFeature.find(params[:id])\n \n if params[:commit] == 'Move'\n @body_style_product_feature.insert_at params[:body_style_product_feature][:position].to_i\n end\n\n respond_to do |format|\n if @body_style_product_feature.update_attributes(params[:body_style_product_feature])\n format.js\n format.html { redirect_to @body_style_product_feature, notice: 'Body style product feature was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @body_style_product_feature.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n preferred_position = PreferredPosition.find(params[:id])\n if preferred_position.update(preferred_position_params)\n render json: preferred_position, status: 200, location: [:api, preferred_position]\n else\n failed_to_update(preferred_position, \"preferred_position\")\n end\n end",
"def update\n respond_to do |format|\n if @contest_position.update(contest_position_params)\n format.html { redirect_to @contest_position, notice: 'Contest position was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @contest_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n authorize @volunteer_position\n @volunteer_position.update(volunteer_position_params)\n respond_with(@volunteer_position)\n end",
"def update\n\n instructor_geolocation = InstructorGeolocation.new\n instructor_geolocation.address = params[:geolocation]\n instructor_geolocation.longitude = params[:lng]\n instructor_geolocation.latitude = params[:lat]\n instructor_geolocation.street = params[:route]\n instructor_geolocation.street_number = params[:street_number]\n instructor_geolocation.zip = params[:postal_code]\n instructor_geolocation.city = params[:locality]\n instructor_geolocation.country = params[:country]\n instructor_geolocation.state = params[:administrative_area_level_1]\n\n respond_to do |format|\n if @instructor.update(instructor_params)\n instructor_geolocation.instructor_id = @instructor.id\n if instructor_geolocation.save\n format.html { redirect_to @instructor, notice: 'Instructor was successfully updated.' }\n format.json { render :show, status: :ok, location: @instructor }\n else\n format.html { render :new }\n format.json { render json: @instructor_geolocation.errors, status: :unprocessable_entity }\n end\n else\n format.html { render :edit }\n format.json { render json: @instructor.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @feature_id = args[:feature_id] if args.key?(:feature_id)\n @lat_lng = args[:lat_lng] if args.key?(:lat_lng)\n @location_name = args[:location_name] if args.key?(:location_name)\n @mid = args[:mid] if args.key?(:mid)\n @timezone = args[:timezone] if args.key?(:timezone)\n end",
"def update\n @map = Map.find_by_id(params[:id])\n new_points = []\n if points = params['markers']\n points.each do |marker_identifier,point|\n new_points << @map.points.build(:marker_identifier => marker_identifier, :lat => point['lat'], :lng => point['lng'],:description =>point['description'])\n end\n end\n @map.points = new_points \n respond_to do |format|\n if @map.update_attributes(params[:map])\n format.html { redirect_to(@map, :notice => 'Map was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @map.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_geoloc\n # Too simple ! Need to check than every user are contributors for this project.\n # Ok, I admit this is realy not accurate for any security purpose... Curious to see what Novagile think about\n CacheUser.update_geocodes(geoloc_params)\n respond_to do |format|\n format.html { render text: \"Request processed\", status: \"ok\" }\n format.json { render text: {message: \"Request processed\"}, status: :ok}\n end\n end",
"def update\n respond_to do |format|\n if @position.update(position_params)\n format.html { redirect_to portfolio_position_path(@portfolio), notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @position }\n else\n format.html { render :edit }\n format.json { render json: @position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @position_state.update(position_state_params)\n format.html { redirect_to @position_state, notice: 'Position state was successfully updated.' }\n format.json { render :show, status: :ok, location: @position_state }\n else\n format.html { render :edit }\n format.json { render json: @position_state.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @position_member = PositionMember.find(params[:id])\n\n respond_to do |format|\n if @position_member.update_attributes(params[:position_member])\n format.html { redirect_to(@position_member, :notice => 'Position member was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @position_member.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: I18n.translate(\"success\", scope: %i[characters edit]) }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @character = current_user.characters.find(params[:id])\n\n # This has issues with case, resulting in dupes of the same tag\n #@character.tag_list = params[:character][:property] + \", \" + params[:tag_list]\n @character.tag_list = params[:tag_list]\n\n respond_to do |format|\n if @character.update_attributes(character_params)\n\n flash[:notice] = 'Character was successfully updated.'\n format.html { redirect_to(@character) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @character.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_geo_db\n json = JSON.parse params[:features]\n @theme_map = ThemeMap.find(params[:theme_map][:id])\n gjson = RGeo::GeoJSON.decode( json, json_parser: :json )\n gjson.each do |feature|\n line = nil\n f_id = feature.properties[\"id\"]\n if f_id != nil\n line = UserLine.find(f_id)\n else\n line = UserLine.new\n end\n line.text = feature.properties[\"text\"]\n line.number = feature.properties[\"number\"]\n line.amount = feature.properties[\"amount\"]\n line.name = feature.properties[\"name\"]\n line.user_id = @current_user_id\n line.map_layer_id = 100 # @theme_map.interactive_layer_id\n wkt_string = feature.geometry.as_text\n g_factory = RGeo::Cartesian::Factory.new(srid: 3857)\n line.geometry = g_factory.parse_wkt(wkt_string)\n line.save # update_attributes(user_line_params)\n end\n render :js => 'alert(\"saved\");'\n end",
"def update\n @coord = @member.coord\n \n respond_to do |format|\n if @coord.update_attributes(coord_params)\n format.html { redirect_to member_coord_path(@member), notice: 'Coordonnées mises à jour' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @coord.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n admin_id = @coord.admin_id\n respond_to do |format|\n if @coord.update(coord_params)\n format.html { redirect_to coords_path(admin_id: admin_id), notice: 'Coord was successfully updated.' }\n format.json { render :show, status: :ok, location: @coord }\n else\n format.html { render :edit }\n format.json { render json: @coord.errors, status: :unprocessable_entity }\n end\n end\n end",
"def move\n # TODO: Check validity of params, they can be spoofed\n # (And log cheaters.)\n\n # TODO: All the following and more\n # Validate acceptable movement here\n #if acceptable_move == true\n if true == true\n @x = (@character.xloc = @character.xloc + params[:x].to_i)\n @y = (@character.yloc = @character.yloc + params[:y].to_i)\n @rad = @character.view\n @character.view = 7\n @character.save\n end\n\n # FUTURE: Move companions\n # Companions/etc\n\n # TODO: Delays based on terrain\n\n\n @world_map = WorldMap.new\n #render 'relocate'\n #render :template => 'relocate.js.erb'\n respond_to do |format|\n format.js { render :layout => false }\n end\n\n\n #@world_map = WorldMap.find(params[:id])\n #\n #respond_to do |format|\n # if @world_map.update_attributes(params[:world_map])\n # format.html { redirect_to @world_map, notice: 'World map was successfully updated.' }\n # format.json { head :no_content }\n # else\n # format.html { render action: \"edit\" }\n # format.json { render json: @world_map.errors, status: :unprocessable_entity }\n # end\n #end\n end",
"def update\n respond_to do |format|\n if @itinerary.update(itinerary_params)\n format.html { redirect_to @itinerary, notice: 'Itinerary was successfully updated.' }\n format.json { render :show, status: :ok, location: @itinerary }\n\n if params[:origin]\n @itinerary.locations[0].address = params[:origin]\n @itinerary.locations[0].get_coords\n end\n if params[:destination]\n @itinerary.locations[0].address = params[:destination]\n @itinerary.locations[0].get_coords\n end\n\n else\n format.html { render :edit }\n format.json { render json: @itinerary.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n # checks if user is authorized\n if authorise(request)\n # operation parameter tells what put operation should be done on vertex\n operation = params[:operation]\n case operation\n when 'connection'\n update_connection(params[:from_vertex_id], params[:to_vertex_id])\n when 'transformation'\n update_transformation(params[:id], params[:pos_x], params[:pos_y], params[:width],\n params[:height], params[:z_index])\n when 'attribute'\n update_attribute(params[:id], params[:name], params[:estimated_time], params[:clue],\n params[:description])\n else\n render json: { success: false, message: 'Operation does not exist' }, status: :bad_request\n end\n else\n render json: { success: false, message: 'Unauthorized' }, status: 401\n end\n end",
"def update\n respond_to do |format|\n if @ubigeo.update(ubigeo_params)\n format.html { redirect_to @ubigeo, notice: 'Ubigeo was successfully updated.' }\n format.json { render :show, status: :ok, location: @ubigeo }\n else\n format.html { render :edit }\n format.json { render json: @ubigeo.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @has_geolocation = args[:has_geolocation] if args.key?(:has_geolocation)\n end",
"def update\n @game = current_or_guest_user.games.find_by(id: params[:game_id])\n @position = @game.positions.find_by(id: params[:id]) if @game\n\n if @position && !@position.shooted? && !@game.finished?\n @game.shoot_position!(@position)\n render json: @position, status: 200\n else\n render json: { 'error': \"You can't shoot this position.\" }, status: 401\n end\n end",
"def update\n # @character.update character_params\n if @character.update character_params\n render json: {\n status: :updated,\n character: @character\n }\n else\n render json: {\n status: 500,\n errors: @character.errors.full_messages\n }\n end\n end",
"def update\n @place = Place.find(params[:id])\n\n if not @place.address == \"\"\n @place.lat = Geocoder.coordinates(@place.address)[0];\n @place.lon = Geocoder.coordinates(@place.address)[1];\n end\n\n respond_to do |format|\n if @place.update_attributes(params[:place])\n format.html { redirect_to(@place, :notice => 'Place was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @place.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n place = Place.find(params[:id])\n map = Map.find(params[:map_id])\n #place.update(place_params)\n place_attrs = place_params\n #binding.pry\n map.adjust_place(place, place_attrs)\n redirect_to edit_map_path(map)\n end",
"def update\n respond_to do |format|\n if @position_scene3.update(position_scene3_params)\n format.html { redirect_to @position_scene3, notice: 'Position scene3 was successfully updated.' }\n format.json { render :show, status: :ok, location: @position_scene3 }\n else\n format.html { render :edit }\n format.json { render json: @position_scene3.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\n fullImage = params[:committee_position][:image_url].split('/')\n params[:committee_position][:image_url] = fullImage[fullImage.length-1]\n\n respond_to do |format|\n if @committee_position.update(committee_position_params)\n flash[:success] = \"Committee Position Updated.\"\n format.html { redirect_to committee_positions_path}\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @committee_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def change_locations(new_loc_name)\n MOVIEDB.execute(\"UPDATE locations SET location = '#{new_loc_name}' WHERE id = #{@id};\")\n end",
"def new\n @character_geo_position = Character::GeoPosition.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def update\n respond_to do |format|\n if @computer_geometry.update(computer_geometry_params)\n format.html { redirect_to @computer_geometry, notice: 'Computer geometry was successfully updated.' }\n format.json { render :show, status: :ok, location: @computer_geometry }\n else\n format.html { render :edit }\n format.json { render json: @computer_geometry.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @geo_coordinates = args[:geo_coordinates] if args.key?(:geo_coordinates)\n @label = args[:label] if args.key?(:label)\n @location = args[:location] if args.key?(:location)\n end",
"def update_geo_location\n self.class.update_geo_location(self)\n self.reload\n end",
"def update\n respond_to do |format|\n if @managers_position.update(managers_position_params)\n format.html { redirect_to @managers_position, notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @managers_position }\n else\n format.html { render :edit }\n format.json { render json: @managers_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_my_location\n current_user.update(latitude: params[:latitude], longitude: params[:longitude])\n end",
"def update\n\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @geometry_id = args[:geometry_id] if args.key?(:geometry_id)\n end",
"def update_worldmap\n if data = (@environment[\"gps\"] || @environment[\"network\"])\n worldmap_request \"PUT\", \"/hoc\", data.to_json\n end\n end",
"def show\n @character_geo_position = Character::GeoPosition.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @character_geo_position }\n end\n end",
"def update\n if params[:place_id]\n @safe_house = Place.find(params[:place_id])\n place_id_present = true\n end\n\n respond_to do |format|\n if place_id_present && @safe_house.update_attributes( :name => params[:name],\n :zombie_probability => params[:zombie_probability],\n :latitude => params[:latitude],\n :longitude => params[:longitude],\n :has_weapons => params[:has_weapons],\n :has_food => params[:has_food],\n :has_people => params[:has_people])\n format.json { render :json => { :status => \"OK\", :response => {:updated => true} }}\n else\n format.json { render :json => { :status => \"Error\", :response => {} }}\n end\n end\n end",
"def update\n respond_to do |format|\n if @hrms_position.update(hrms_position_params)\n format.html { redirect_to @hrms_position, notice: 'Position was successfully updated.' }\n format.json { render :show, status: :ok, location: @hrms_position }\n else\n format.html { render :edit }\n format.json { render json: @hrms_position.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @proximity.update(proximity_params)\n format.html { redirect_to @proximity, notice: 'Proximity was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @proximity.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @character.update(character_params)\n format.html { redirect_to @character, notice: 'Character was successfully updated.' }\n format.json { render :show, status: :ok, location: @character }\n else\n format.html { render :edit }\n format.json { render json: @character.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @geographic_item.update(geographic_item_params)\n format.html { redirect_to @geographic_item.metamorphosize, notice: 'Geographic item was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @geographic_item.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update!(**args)\n @places = args[:places] if args.key?(:places)\n end",
"def update\n @loc = current_user.locs.find(params[:id])\n\n respond_to do |format|\n if @loc.update_attributes(params[:loc])\n format.html { redirect_to @loc, notice: 'Loc was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @loc.errors, status: :unprocessable_entity }\n end\n end\n end"
] | [
"0.6205665",
"0.6190675",
"0.6161236",
"0.600425",
"0.5979568",
"0.59693027",
"0.59693027",
"0.59693027",
"0.5958358",
"0.5930068",
"0.59215546",
"0.5919159",
"0.5916072",
"0.5859566",
"0.58403295",
"0.5826592",
"0.5757375",
"0.575664",
"0.5751904",
"0.56965977",
"0.56790584",
"0.5674439",
"0.56731474",
"0.5668756",
"0.5653508",
"0.56516504",
"0.5646425",
"0.5643384",
"0.563644",
"0.5623411",
"0.5610142",
"0.5609218",
"0.5606633",
"0.5593821",
"0.5586342",
"0.5584632",
"0.5580278",
"0.5580026",
"0.55615497",
"0.5559614",
"0.5538332",
"0.55092037",
"0.5508708",
"0.5505787",
"0.54961246",
"0.5483366",
"0.54805857",
"0.54805857",
"0.5473367",
"0.5469657",
"0.54605275",
"0.54596126",
"0.5441726",
"0.54416496",
"0.5431972",
"0.54293054",
"0.54255396",
"0.542258",
"0.54186445",
"0.5416736",
"0.541482",
"0.5412231",
"0.5404542",
"0.5402901",
"0.53975064",
"0.53812814",
"0.5376585",
"0.53660244",
"0.535894",
"0.53544104",
"0.5353765",
"0.535349",
"0.5351672",
"0.53487164",
"0.53481275",
"0.534367",
"0.533852",
"0.53354037",
"0.5333147",
"0.5328338",
"0.5316461",
"0.53075486",
"0.53075194",
"0.5306196",
"0.53053784",
"0.5301167",
"0.5291461",
"0.5282935",
"0.52735347",
"0.52727026",
"0.5269461",
"0.52689487",
"0.5257019",
"0.5257019",
"0.5257019",
"0.5257019",
"0.5257019",
"0.5255602",
"0.5253359",
"0.52511173"
] | 0.72673297 | 0 |
DELETE /character/geo_positions/1 DELETE /character/geo_positions/1.json | def destroy
@character_geo_position = Character::GeoPosition.find(params[:id])
@character_geo_position.destroy
respond_to do |format|
format.html { redirect_to character_geo_positions_url }
format.json { head :ok }
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def destroy\n @geo.destroy\n respond_to do |format|\n format.html { redirect_to geos_url, notice: 'Geo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @post_geo = current_user.post_geos.find(params[:id])\n @post_geo.destroy\n\n respond_to do |format|\n format.html { redirect_to [:client, :post_geos] }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo_datum.destroy\n respond_to do |format|\n format.html { redirect_to geo_data_url, notice: 'Geo datum was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geometry.destroy\n respond_to do |format|\n format.html { redirect_to geometries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @position.destroy\n\n respond_to do |format|\n format.html { redirect_to positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @unko_position = UnkoPosition.find(params[:id])\n @unko_position.destroy\n\n respond_to do |format|\n format.html { redirect_to unko_positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location.destroy\n\n respond_to do |format|\n format.html { redirect_to geolocation_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @ubigeo.destroy\n respond_to do |format|\n format.html { redirect_to ubigeos_url, notice: 'Ubigeo was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geopoint = Geopoint.find(params[:id])\n @geopoint.destroy\n\n respond_to do |format|\n format.html { redirect_to geopoints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geographic_item.destroy\n respond_to do |format|\n format.html { redirect_to geographic_items_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position_mapper = PositionMapper.find(params[:id])\n @position_mapper.destroy\n\n respond_to do |format|\n format.html { redirect_to position_mappers_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @admin_geonode = Admin::Geonode.find(params[:id])\n @admin_geonode.destroy\n\n respond_to do |format|\n format.html { redirect_to admin_geonodes_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geocoded_location = GeocodedLocation.find(params[:id])\n @geocoded_location.destroy\n\n respond_to do |format|\n format.html { redirect_to :root, notice: 'Geocoded location was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @entried_position.destroy\n respond_to do |format|\n format.html { redirect_to entried_positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position = Position.find(params[:id])\n @position.destroy\n\n respond_to do |format|\n format.html { redirect_to positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n admin_id = @coord.admin_id\n @coord.destroy\n respond_to do |format|\n format.html { redirect_to coords_path(admin_id: admin_id), notice: 'Coord was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position = Position.find(params[:id])\n @position.destroy\n respond_to do |format|\n format.html { redirect_to positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @trein_coord_pedag.destroy\n respond_to do |format|\n format.html { redirect_to trein_coord_pedags_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n get_map\n @map.destroy\n\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def delete\n render json: Location.delete(params[\"id\"])\n end",
"def destroy\n @geoname = Geoname.find(params[:id])\n @geoname.destroy\n\n respond_to do |format|\n format.html { redirect_to(geonames_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @contest_position.destroy\n respond_to do |format|\n format.html { redirect_to contest_positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position_scene1.destroy\n respond_to do |format|\n format.html { redirect_to position_scene1s_url, notice: 'Position scene1 was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @locationmap = Locationmap.find(params[:id])\n @locationmap.destroy\n\n respond_to do |format|\n format.html { redirect_to locationmaps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @loc = current_user.locs.find(params[:id])\n @loc.destroy\n\n respond_to do |format|\n format.html { redirect_to locs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geo_country = Geo::Country.find(params[:id])\n @geo_country.destroy\n\n respond_to do |format|\n format.html { redirect_to(geo_countries_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @coordinate.destroy\n respond_to do |format|\n format.html { redirect_to coordinates_url, notice: 'Coordinate was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @user_position = UserPosition.find(params[:id])\n @user_position.destroy\n\n respond_to do |format|\n format.html { redirect_to user_positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @location = Location.find(params[:id])\r\n @location.destroy\r\n\r\n respond_to do |format|\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n device = @position.device\n @position.destroy\n respond_to do |format|\n format.html { redirect_to device_positions_url(device), notice: 'Position was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @polygon = Polygon.find(params[:id])\n @polygon.destroy\n\n respond_to do |format|\n format.html { redirect_to polygons_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @crew_position = CrewPosition.find(params[:id])\n @crew_position.destroy\n\n respond_to do |format|\n format.html { redirect_to crew_positions_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @position.destroy\n respond_to do |format|\n format.html { redirect_to positions_url, notice: 'Position was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position.destroy\n respond_to do |format|\n format.html { redirect_to positions_url, notice: 'Position was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position.destroy\n respond_to do |format|\n format.html { redirect_to positions_url, notice: 'Position was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\r\n @location = Location.find(params[:id])\r\n RemovedLocation.create(server_id: Integer(params[:id]))\r\n directory = Rails.root.join('app','assets','locations');\r\n\r\n path = File.join(directory, @location.image)\r\n File.delete(path)\r\n @location.destroy\r\n mv = MapsVersion.first\r\n mv.version = mv.version+1\r\n mv.save\r\n respond_to do |format|\r\n format.html { redirect_to locations_url }\r\n format.json { head :no_content }\r\n end\r\n end",
"def destroy\n @location_point = LocationPoint.find(params[:id])\n @location_point.destroy\n\n respond_to do |format|\n format.html { redirect_to location_points_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tinymap = Tinymap.find(params[:id])\n @tinymap.destroy\n\n respond_to do |format|\n format.html { redirect_to tinymaps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location = Location.find(params[:id])\n #@client = Client.find(@location.client_ids)\n #@contact = Contact.find(@location.contact_ids)\n \n @location.destroy\n\n respond_to do |format|\n \n format.html { redirect_to request.referer }\n format.json { head :no_content }\n end\n end",
"def destroy\n @text_position = TextPosition.find(params[:id])\n @text_position.destroy\n\n respond_to do |format|\n format.html { redirect_to text_positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @maplocation.destroy\n respond_to do |format|\n format.html { redirect_to maplocations_url, notice: 'Maplocation was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to kanzume_maps_path(kanzume_id: @kanzume.id), notice: 'Map was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @character_entry.destroy\n respond_to do |format|\n format.html { redirect_to character_entries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @character.destroy\n respond_to do |format|\n format.html { redirect_to characters_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geocach = Geocach.find(params[:id])\n @geocach.destroy\n\n respond_to do |format|\n format.html { redirect_to geocaches_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n preferred_position = PreferredPosition.find(params[:id])\n preferred_position.destroy\n head 204\n end",
"def destroy\n @position = Position.find(params[:id])\n Activity.create(content: \"#{@position.job_title}\", action: \"deleted\", office: \"#{@position.office}\", user_name: \"#{@position.user_name}\", link: \"#{@position.id}\")\n @position.destroy\n\n respond_to do |format|\n format.html { redirect_to positions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gethotelstaticdatagd = Gethotelstaticdatagd.find(params[:id])\n @gethotelstaticdatagd.destroy\n\n respond_to do |format|\n format.html { redirect_to gethotelstaticdatagds_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @lat_lon.destroy\n respond_to do |format|\n format.html { redirect_to lat_lons_url, notice: 'Lat lon was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @geofence.destroy\n respond_to do |format|\n format.html { redirect_to geofences_url, notice: 'Geofence was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @record = Location.find(params[:id])\n @record.trash\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @totem_location.destroy\n respond_to do |format|\n format.html { redirect_to totem_locations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @proximity.destroy\n respond_to do |format|\n format.html { redirect_to proximities_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map = Map.find(params[:id])\n @map.destroy\n\n respond_to do |format|\n format.html { redirect_to maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n #@items_location = ItemsLocation.find(params[:id])\n #@items_location.destroy\n\n respond_to do |format|\n format.html { redirect_to items_locations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @coordinate = coordinate_for_current_user params[:id]\n @coordinate.destroy\n\n respond_to do |format|\n format.html { redirect_to track_coordinates_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @smallmap = Smallmap.find(params[:id])\n @smallmap.destroy\n\n respond_to do |format|\n format.html { redirect_to smallmaps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @maze.destroy\n respond_to do |format|\n format.html { redirect_to mazes_url, notice: 'Labirinto destruído com sucesso.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n \t@internship_position = InternshipPosition.find(params[:id])\n \t@internship_position.destroy\n\n \trespond_to do |format|\n \t\tformat.html { redirect_to root_path, notice: 'Posiiton was successfully destroyed.' }\n \t\tformat.json { head :no_content }\n \tend\n end",
"def destroy\n @specific_location.destroy\n respond_to do |format|\n format.html { redirect_to specific_locations_url, notice: 'Specific location was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @business_geopoint.destroy\n respond_to do |format|\n format.html { redirect_to business_geopoints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @world_map = WorldMap.find(params[:id])\n @world_map.destroy\n\n respond_to do |format|\n format.html { redirect_to world_maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gpsquest.destroy\n respond_to do |format|\n format.html { redirect_to gpsquests_url, notice: 'Gpsquest was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @apartment_geopoint.destroy\n respond_to do |format|\n format.html { redirect_to apartment_geopoints_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @dimgeom = Dimgeom.find(params[:id])\n @dimgeom.destroy\n\n respond_to do |format|\n format.html { redirect_to dimgeoms_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location_url_map = LocationUrlMap.find(params[:id])\n @location_url_map.destroy\n\n respond_to do |format|\n format.html { redirect_to location_url_maps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @cordinate.destroy\n respond_to do |format|\n format.html { redirect_to cordinates_url, notice: 'Cordinate was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @empathy_map_record.destroy\n respond_to do |format|\n format.html { redirect_to empathy_map_records_url, notice: 'Empathy map record was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @osm.destroy\n respond_to do |format|\n format.html { redirect_to osms_url, notice: 'Osm was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @position_scene3.destroy\n respond_to do |format|\n format.html { redirect_to position_scene3s_url, notice: 'Position scene3 was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @exposition = Exposition.find(params[:id])\n @exposition.destroy\n\n respond_to do |format|\n format.html { redirect_to expositions_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @processed_location = ProcessedLocation.find(params[:id])\n @processed_location.destroy\n\n respond_to do |format|\n format.html { redirect_to processed_locations_url }\n format.json { head :no_content }\n end\n end",
"def delete_json(path)\n url = [base_url, path].join\n resp = HTTParty.delete(url, headers: standard_headers)\n parse_json(url, resp)\n end",
"def destroy\n @position = Position.find(params[:id])\n @position.destroy\n \n respond_to do |format|\n format.html { redirect_to(positions_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @spatial_coverages = SpatialCoverages.find(params[:id])\n @spatial_coverages.destroy\n\n respond_to do |format|\n format.html { redirect_to spatial_coverage_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to maps_url, notice: 'Map was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @map.destroy\n respond_to do |format|\n format.html { redirect_to maps_url, notice: 'Map was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @csv_map.destroy\n respond_to do |format|\n format.html { redirect_to csv_maps_url, notice: 'Csv map was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @accession_location_entry = AccessionLocationEntry.find(params[:id])\n @accession_location_entry.destroy\n\n respond_to do |format|\n format.html { redirect_to accession_location_entries_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @habitant.destroy\n respond_to do |format|\n format.html { redirect_to habitants_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @chef_mapping = ChefMapping.find(params[:id])\n @chef_mapping.destroy\n\n respond_to do |format|\n format.html { redirect_to chef_mappings_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @attitude.destroy\n respond_to do |format|\n format.html { redirect_to attitudes_url, notice: 'Attitude was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @point.destroy\n respond_to do |format|\n format.html { redirect_to points_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @coordenador_estagio = CoordenadorEstagio.find(params[:id])\n @coordenador_estagio.destroy\n\n respond_to do |format|\n format.html { redirect_to coordenador_estagios_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @moretinymap = Moretinymap.find(params[:id])\n @moretinymap.destroy\n\n respond_to do |format|\n format.html { redirect_to moretinymaps_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @character.destroy\n respond_to do |format|\n format.html { redirect_to @game }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location = $User.get_location(params[:id])\n @location.kill\n GlobalData.reload(:locations)\n respond_to do |format|\n format.html { redirect_to('/locations') }\n format.xml { head :ok }\n end\n end",
"def delete\n NamedMap.stats_aggregator.timing('named-map.delete') do\n response = self.class.http_client.delete( url + '?api_key=' + @parent.api_key,\n {\n headers: @parent.headers,\n ssl_verifypeer: @parent.verify_cert,\n ssl_verifyhost: @parent.verify_host,\n followlocation: true,\n connecttimeout: HTTP_CONNECT_TIMEOUT,\n timeout: HTTP_REQUEST_TIMEOUT\n } )\n raise HTTPResponseError, \"DELETE:#{response.code} #{response.request.url} #{response.body}\" unless response.code == 204\n end\n end",
"def destroy\n set_location.destroy\n respond_to do |format|\n format.html { redirect_to locations_url, flash: {success: \"Successfully deleted #{@location.name} location!\" }}\n format.json { head :no_content }\n end\n end",
"def destroy\n @locacao = Locacao.find(params[:id])\n @locacao.destroy\n\n respond_to do |format|\n format.html { redirect_to locacaos_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n\n\n\n @committee_position.destroy\n flash[:danger] = \"Committee Position Deleted.\"\n respond_to do |format|\n format.html { redirect_to committee_positions_path }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location_mapping.destroy\n respond_to do |format|\n format.html { redirect_to location_mappings_url, notice: 'Location mapping was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @location.destroy\n\n respond_to do |format|\n format.html { redirect_to locations_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @gethotel = Gethotel.find(params[:id])\n @gethotel.destroy\n\n respond_to do |format|\n format.html { redirect_to gethotels_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @corp_location = CorpLocation.get(params[:id])\n @corp_location.destroy\n\n respond_to do |format|\n format.html { redirect_to corp_locations_url }\n format.json { head :no_content }\n end\n end"
] | [
"0.7031428",
"0.6983041",
"0.68709964",
"0.6789183",
"0.67276955",
"0.6675587",
"0.6669673",
"0.66657853",
"0.66563004",
"0.665613",
"0.66424036",
"0.6629626",
"0.6615284",
"0.66061103",
"0.65896654",
"0.6583766",
"0.65780336",
"0.65647817",
"0.6558075",
"0.6550642",
"0.6550642",
"0.6550642",
"0.6546188",
"0.651476",
"0.6504261",
"0.6496332",
"0.6492462",
"0.6466143",
"0.6449172",
"0.643981",
"0.6433876",
"0.6428243",
"0.6423329",
"0.6414113",
"0.64095676",
"0.64076674",
"0.63971907",
"0.6389144",
"0.6389144",
"0.6389144",
"0.6386408",
"0.63826",
"0.6366301",
"0.63533",
"0.635009",
"0.63460886",
"0.634559",
"0.63440096",
"0.63434803",
"0.6339631",
"0.6333457",
"0.6332312",
"0.6329048",
"0.6328808",
"0.6324991",
"0.63205236",
"0.6317438",
"0.6303983",
"0.6301862",
"0.6292391",
"0.62885934",
"0.62816966",
"0.6276983",
"0.6273233",
"0.62682694",
"0.6267541",
"0.6266714",
"0.62647945",
"0.6264227",
"0.6258455",
"0.62581426",
"0.62563205",
"0.6252254",
"0.62503153",
"0.62479776",
"0.6247238",
"0.6246811",
"0.6242628",
"0.6240183",
"0.62384176",
"0.62376267",
"0.62376267",
"0.62361157",
"0.623462",
"0.62341774",
"0.6227543",
"0.6227332",
"0.62271774",
"0.6223135",
"0.6220622",
"0.6220328",
"0.62139416",
"0.6211456",
"0.6211422",
"0.621003",
"0.6209719",
"0.6208467",
"0.62071514",
"0.62069905",
"0.62039876"
] | 0.77763456 | 0 |
engine registry is a simple hash | def register hash
hash.each_pair do |name, engine_class|
raise "Class must implement the CanTango::Engine API." unless self.class.valid_engine?(engine_class)
raise "Name of engine must be a String or Symbol" if !name.kind_of_label?
registered[name.to_s] = ability_engine(engine_class)
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def registry\n @registry ||= {}\n end",
"def registry; end",
"def registry; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash(*) end",
"def hash()\n #This is a stub, used for indexing\n end",
"def registry\n\t\tbrand(Rex::Post::Meterpreter::Extensions::Stdapi::Sys::Registry)\n\tend",
"def hash\n end",
"def hash\n end",
"def hash\n end",
"def register hash\n hash.each_pair do |name, engine_class|\n raise \"Class must implement the CanTango Engine API. You can start by sublclassing CanTango::Engine\" if !engine? engine_class\n raise \"Name of engine must be a String or Symbol\" if !name.kind_of_label?\n registered[name.to_sym] = engine_class\n end\n end",
"def hash(key); end",
"def name_and_engine\n @engine_name_and_engine ||= Hash[*engines.collect {|e| [engine_name(e), e]}.flatten]\n end",
"def registry_interface_hash\n hash = calculate_interface_hash(\n [\n {\n name: 'bind',\n descriptor: '(Ljava/lang/String;Ljava/rmi/Remote;)V',\n exceptions: ['java.rmi.AccessException', 'java.rmi.AlreadyBoundException', 'java.rmi.RemoteException']\n },\n {\n name: 'list',\n descriptor: '()[Ljava/lang/String;',\n exceptions: ['java.rmi.AccessException', 'java.rmi.RemoteException']\n },\n {\n name: 'lookup',\n descriptor: '(Ljava/lang/String;)Ljava/rmi/Remote;',\n exceptions: ['java.rmi.AccessException', 'java.rmi.NotBoundException', 'java.rmi.RemoteException']\n },\n {\n name: 'rebind',\n descriptor: '(Ljava/lang/String;Ljava/rmi/Remote;)V',\n exceptions: ['java.rmi.AccessException', 'java.rmi.RemoteException']\n },\n {\n name: 'unbind',\n descriptor: '(Ljava/lang/String;)V',\n exceptions: ['java.rmi.AccessException', 'java.rmi.NotBoundException', 'java.rmi.RemoteException']\n }\n ]\n )\n\n hash\n end",
"def initialize\n @registry = {}\n end",
"def registry\n return {} unless _registry\n\n _registry.dup.freeze\n end",
"def register(params)\n @fields = params[\"fields\"]\n @tag = params[\"tag\"]\n @key = params[\"key\"]\n @digest = OpenSSL::Digest::SHA256.new\nend",
"def registry\n @registry\n end",
"def hash()\n #This is a stub, used for indexing\nend",
"def hash\n name.hash ^ provider.hash\n end",
"def class\n Registry\n end",
"def registry_key\n return @registry_key\n end",
"def hash=(_arg0); end",
"def rehash() end",
"def hash\n\t\t(language + type + klass + thing).hash\n\tend",
"def registered\n @@registry_lock.synchronize { @@registry.keys }\n end",
"def hash\n guid.hash\n end",
"def default_key \n Digest::SHA1.hexdigest(\"riaque:#{name}\")\n end",
"def hash_code; end",
"def name_and_class\n @engine_name_and_class ||= Hash[*engines.collect { |e| [engine_name(e), engine_class_name(e)] }.flatten]\n engine_name_and_class.dup\n end",
"def registry_hive\n return @registry_hive\n end",
"def registry\n @registry ||= client.registry\n end",
"def hash\n name.hash ^ version.hash\n end",
"def hash\r\n\t\treturn @name.hash() + @type.hash()\r\n\tend",
"def registry\n @registry ||= Registry.new(self)\n end",
"def path_and_name\n @engine_path_and_name ||= Hash[*engines.collect { |e| [engine_class_name(e).underscore, engine_name(e)] }.flatten]\n engine_path_and_name.dup\n end",
"def fingerprint; end",
"def hash\n @hash\n end",
"def hash_for(expression)\n\n end",
"def registers; end",
"def hasher\n Hashids.new(@hash_id_state[:salt], @hash_id_state[:length])\n end",
"def registry_key=(value)\n @registry_key = value\n end",
"def evalsha(*args); end",
"def evalsha(*args); end",
"def plugin_hash; end",
"def hash; map{|el| \"#{el.name} @ #{el.hash}\"}; map(&:hash).reduce(:+) % 2**32; end",
"def hash\n self.class.name.hash ^ @key.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n raw = [name, type, values.join('/')].join(' ')\n Digest::MD5.hexdigest(raw)\n end",
"def hash\n self.class.name.hash\n end",
"def hash_key(name); end",
"def __hash\n @hash\n end",
"def for backend\n registry[backend]\n end",
"def hash\n [hint,name,ordinal,module_name].hash\n end",
"def hash\n self.class.hash ^ @ns.hash\n end",
"def run\n print_status(\"Running module against #{sysinfo['Computer']}\")\n host = Rex::FileUtils.clean_path(sysinfo['Computer'])\n hash_file = store_loot('windows.hashes', 'text/plain', session, '', \"#{host}_hashes.txt\", 'Windows Hashes')\n print_status('Hashes will be saved to the database if one is connected.')\n print_good('Hashes will be saved in loot in JtR password file format to:')\n print_status(hash_file)\n smart_hash_dump(datastore['GETSYSTEM'], hash_file)\n end",
"def key_manager; end",
"def hash\n\n self.h.fei.hash\n end",
"def unit_hash\n end",
"def find_registry\n @registry = Spree::Registry.find_by_access_hash!(params[:id])\n end",
"def hash\n 0\n end",
"def registry_hive=(value)\n @registry_hive = value\n end",
"def register\r\n \r\n end",
"def hash\n [ name, database, password ].hash\n end",
"def hash\r\n @_hash ||= _hash\r\n end",
"def register(instance)\n key = build_key(instance)\n key_registry[key] = instance\n end",
"def hash\n type.hash ^ (id.hash >> 1)\n end",
"def hash\n\t\treturn self.name.to_s.hash\n\tend",
"def add(hash); end",
"def passive\t\n\thash=Digest::MD5.hexdigest(@tagpattern)\n\t[{:name=>\"tag pattern hash\",:string=>hash}]\nend",
"def hashed_input\n\t\tmd5_hex = Digest::MD5.hexdigest(@term)\n\t\tmd5_hex.scan(/../).map { |s| s.to_i(16) }\n\tend",
"def hashed_rooster\r\nend",
"def registry\n return @registry\n end",
"def hash\n code.hash\n end",
"def make_plugin_hash; end",
"def initialize\n @registry = Registry.new\n end",
"def reset_engine_registrations!\n @@registered_engine_confs = {}\n end",
"def register(session_id)\n md5 = Digest::MD5::new\n now = Time::now\n md5.update(now.to_s)\n md5.update(String(now.usec))\n md5.update(String(rand(0)))\n md5.update(String($$))\n md5.update('foobar')\n md5.update(owner_id.to_s)\n md5.update(job_id.to_s)\n md5.update(server.to_s)\n key = md5.hexdigest\n \n Cache.put(\"#{session_id}_#{key}\", self )\n \n reverse_key = [owner_id, job_id, server].join(\"_\")\n Cache.put(\"#{session_id}_#{reverse_key}\", key )\n end",
"def hash\n @hash.hash\n end",
"def hash\n @symbols.hash + 37*positive?.hash\n end",
"def key; end",
"def key; end"
] | [
"0.68308985",
"0.68308985",
"0.68308985",
"0.68308985",
"0.68308985",
"0.68308985",
"0.68308985",
"0.67354125",
"0.6539229",
"0.6539229",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.64283586",
"0.6398715",
"0.6362446",
"0.6302801",
"0.62873256",
"0.62873256",
"0.62873256",
"0.6226251",
"0.6199667",
"0.6182731",
"0.6125245",
"0.61137563",
"0.61112934",
"0.6110832",
"0.6099061",
"0.60831463",
"0.607694",
"0.6026516",
"0.6019372",
"0.59764326",
"0.5951206",
"0.59207785",
"0.5912092",
"0.588069",
"0.58643734",
"0.5842844",
"0.5810309",
"0.58084095",
"0.57968575",
"0.57886314",
"0.57810456",
"0.57770604",
"0.57684714",
"0.57612926",
"0.57126886",
"0.5703618",
"0.5697825",
"0.56965894",
"0.568484",
"0.5679944",
"0.5679944",
"0.5658672",
"0.5653317",
"0.5637441",
"0.56313556",
"0.56313556",
"0.56313556",
"0.56209534",
"0.56206256",
"0.5619921",
"0.5600383",
"0.5596435",
"0.5596072",
"0.5594083",
"0.55705214",
"0.55652153",
"0.5564503",
"0.55531293",
"0.55518883",
"0.5548898",
"0.5537781",
"0.5535352",
"0.5499575",
"0.5495923",
"0.5489529",
"0.54865354",
"0.5474703",
"0.5474312",
"0.5464134",
"0.5458183",
"0.54566646",
"0.5450009",
"0.54491526",
"0.5447137",
"0.5446375",
"0.5445944",
"0.5444508",
"0.5441768",
"0.5441468",
"0.54379857",
"0.54379857"
] | 0.6171483 | 29 |
Callbacks Constants Methods Default | def to_s
name
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def callbacks; end",
"def callbacks; end",
"def callback\n\n end",
"def callback\n end",
"def callback\n\tend",
"def callback=(_arg0); end",
"def callback_type; end",
"def callbacks\n @callbacks ||= {}\n end",
"def definecallback(&b)\r\n\t\t\t@callback = b\r\n\t\tend",
"def callback(&blk)\n @blk=blk\n end",
"def definecallback(&b)\r\n\t$gamemode.definecallback(&b)\r\nend",
"def on_success(&block); end",
"def callback_method\n run(\"on\", \"string\", \"callback\")\n end",
"def message_callbacks\n @messagecbs\n end",
"def callback\n self.class.callback\n end",
"def callback\n self.class.callback\n end",
"def notifier=(_arg0); end",
"def notifier=(_arg0); end",
"def callback &block\n super\n end",
"def callback &block\n super\n end",
"def callback\n def_deferr = ::EventMachine::DefaultDeferrable.new\n proc_callback = Proc.new { |response| ::OnesnooperServer::Log.debug(\n \"[#{self.class.name}] Handled as: #{response}\"\n ) }\n\n def_deferr.callback &proc_callback\n def_deferr.errback &proc_callback\n\n def_deferr\n end",
"def run_callbacks(options = T.unsafe(nil), &block); end",
"def run_callbacks(index = T.unsafe(nil)); end",
"def notifier; end",
"def notifier; end",
"def callback\n @callback ||= :value.to_proc\n end",
"def callback(&block)\n @callbacks ||= []\n @callbacks << block\n end",
"def cheque_callback(params,&block)\n\t\t\n\tend",
"def nuixWorkerItemCallbackInit\nend",
"def after_set_callback; end",
"def initialize()\n @callbacks = {}\n end",
"def call() end",
"def calls; end",
"def calls; end",
"def helpers(&b)\n Callback.class_eval(&b)\n end",
"def listener; end",
"def setMethod\n #Barcode.enumerate(url_for(:action => :enumCallback))\n puts \"Parameter #{@params} \"\n autoobject = @params['object']\n automethod = @params['method']\n type = @params['type']\n callback = @params['callback'] \n #callback = \"enumCallbackCommon\"\n\n if type == 'async'\n begin\n #eval autoobject +\".\"+ automethod +\" url_for(:action => :\"+callback+\")\"\n puts autoobject +\".\"+ automethod +\"(\"+callback+\")\"\n eval autoobject +\".\"+ automethod +\"(\"+callback+\")\"\n rescue => ex\n puts \"Exception Thrown: #{ex.message}\" \n end\n\n elsif type == 'sync'\n if callback.index(\"|\")\n puts \"here in |\"\n callbackValue = callback.split('|');\n arguments = callbackValue[0]\n callback = callbackValue[1]\n begin\n output = eval autoobject +\".\"+ automethod +\"(\"+arguments+\")\"\n puts \"#{output}\"\n callMethod = callback+\"(output)\"\n eval callMethod\n rescue => ex\n Alert.show_popup \"Exception Thrown: #{ex.message}\" \n end\n else\n begin\n output = eval autoobject +\".\"+ automethod\n puts \"#{output}\"\n callMethod = callback+\"(output)\"\n eval callMethod\n rescue => ex\n Alert.show_popup \"Exception Thrown: #{ex.message}\" \n end\n end\n\n elsif type=='lambda'\n\n if callback.index(\"|\")\n puts \"here in |\"\n callbackValue = callback.split('|');\n arguments = callbackValue[0]\n callback = callbackValue[1]\n puts \"arguments #{arguments}\"\n puts \"callback #{callback}\"\n\n fnlambda = arguments + \",\" + 'lambda{|args| puts \"lambda: #{args}\"\n callMethod = callback+\"(args)\"\n eval callMethod\n }'\n else\n fnlambda = 'lambda{|args| puts \"lambda: #{args}\"\n callMethod = callback+\"(args)\"\n eval callMethod\n }'\n end\n begin\n #eval autoobject +\".\"+ automethod +\" url_for(:action => :\"+callback+\")\"\n puts autoobject +\".\"+ automethod +\"(\"+fnlambda+\")\"\n eval autoobject +\".\"+ automethod +\"(\"+fnlambda+\")\"\n rescue => ex\n puts \"Exception Thrown: #{ex.message}\" \n end\n else\n\n # autoobject = autoobject.capitalize\n if(autoobject != 'self') #If self is there no need to make it capital.\n autoobject = autoobject.slice(0,1).capitalize + autoobject.slice(1..-1)\n end\n answer = nil\n if(callback.empty?)\n puts autoobject +\".\"+ automethod\n answer = eval autoobject +\".\"+ automethod\n else\n puts autoobject +\".\"+ automethod + \"(\"+callback+\")\"\n eval autoobject +\".\"+ automethod +\"(\"+callback+\")\"\n end\n render :string => answer.to_s\n end\n\n\n\nend",
"def run_callbacks(index = T.unsafe(nil), &block); end",
"def handlers; end",
"def handlers; end",
"def handlers; end",
"def add_callback(type, options, &block); end",
"def xml_callbacks\n @xmlcbs\n end",
"def handlers=(_arg0); end",
"def success; end",
"def success; end",
"def handler; end",
"def handler; end",
"def functions\n\n end",
"def events; end",
"def events; end",
"def events; end",
"def events; end",
"def events; end",
"def events; end",
"def events; end",
"def events; end",
"def listener=(_arg0); end",
"def called_from; end",
"def called_from; end",
"def callback\n @on_declare\n end",
"def callback\n logger.info ap params\n head :ok\n end",
"def do_success; end",
"def call; end",
"def call; end",
"def call; end",
"def call; end",
"def call; end",
"def call; end",
"def call; end",
"def call; end",
"def invoke; end",
"def callback(&block)\n super do |*args|\n safe_deferrable_block(*args, &block)\n end\n end",
"def future; end",
"def callback( which, *args )\n block = instance_variable_get( \"@on_#{which.to_s}\" )\n block.call( *args ) if block\n end",
"def note_cb(type)\n __debug_line(\"*** SHRINE CALLBACK #{type} *** | #{file_data.inspect}\")\n end",
"def after_generate_callbacks; end",
"def call(*) end",
"def call(*) end",
"def events=(_); end",
"def callbacks_for(*)\n []\n end",
"def coming_soon\n end",
"def coming_soon\n end",
"def initialize(callback)\n @callback = callback\n end",
"def listeners; end",
"def callback\n false\n end",
"def status(*) end",
"def on_307; on_330; end",
"def callback(&block)\n @callbacks << block\n end",
"def ready; end",
"def ready; end",
"def default_callback\n lambda do |type, format, conv, hsz1, hsz2, data_handle, data1, data2|\n case type\n when XTYP_CONNECT # Request to connect from client, creating data exchange channel\n # format:: Not used.\n # conv:: Not used.\n # hsz1:: Handle to the topic name.\n # hsz2:: Handle to the service name.\n # data_handle:: Handle to DDE data. Meaning depends on the type of the current transaction.\n # data1:: Pointer to a CONVCONTEXT structure that contains context information for the conversation.\n # If the client is not a DDEML application, this parameter is 0.\n # data2:: Specifies whether the client is the same application instance as the server. If the parameter\n # is 1, the client is the same instance. If it is 0, the client is a different instance.\n # *Returns*:: A server callback function should return TRUE(1, but DDE_FACK works just fine too)\n # to allow the client to establish a conversation on the specified service name and topic\n # name pair, or the function should return FALSE to deny the conversation. If the callback\n # function returns TRUE and a conversation is successfully established, the system passes\n # the conversation handle to the server by issuing an XTYP_CONNECT_CONFIRM transaction to\n # the server's callback function (unless the server specified the CBF_SKIP_CONNECT_CONFIRMS\n # flag in the DdeInitialize function).\n\n if hsz2 == @service.handle\n cout \"Service #{@service}: connect requested by client\\n\"\n DDE_FACK # instead of true # Yes, this server supports requested (name) handle\n else\n cout \"Service #{@service} unable to process connection request for #{hsz2}\\n\"\n DDE_FNOTPROCESSED # 0 instead of false # No, server does not support requested (name) handle\n end\n\n when XTYP_POKE # Client initiated XTYP_POKE transaction to push unsolicited data to the server\n # format:: Specifies the format of the data sent from the server.\n # conv:: Handle to the conversation.\n # hsz1:: Handle to the topic name. (Excel: [topic]item ?!)\n # hsz2:: Handle to the item name.\n # data_handle:: Handle to the data that the client is sending to the server.\n # *Returns*:: A server callback function should return the DDE_FACK flag if it processes this\n # transaction, the DDE_FBUSY flag if it is too busy to process this transaction,\n # or the DDE_FNOTPROCESSED flag if it rejects this transaction.\n\n @data.topic = dde_query_string(@id, hsz1) # Convert hsz1 into \"[topic]item\" string and\n if @data.receive(data_handle) # Receive incoming DDE data and process it\n\n # Perform actions like :draw, :debug, :timer, :formats on received data (default :timer)\n @actions.each{|action| @data.send(action.to_sym)}\n DDE_FACK # Transaction successful\n else\n @data.debug\n cout \"Service #{@service} unable to process data request (XTYP_POKE) for #{hsz2}\"\n DDE_FNOTPROCESSED # 0 Transaction NOT successful - return (HDDEDATA)TRUE; ?!(why TRUE, not FALSE)\n end\n else\n DDE_FNOTPROCESSED # 0 - return((HDDEDATA)NULL);// is it the same as 0 ?!\n end\n end\n end",
"def call\n\n\tend",
"def call\n\n\tend",
"def progress_callback\n @progress_callback ||= proc { |_, dltotal, dlnow, ultotal, ulnow|\n progress(dltotal, dlnow, ultotal, ulnow)\n 0\n }\n end",
"def global; end",
"def run_actions; end",
"def callback_phase\n super\n end",
"def trigger_callbacks_for(msg)\n case msg.message_type\n\n # ----- server messages\n when RPL_WELCOME\n notify :registered_with_server\n when CMD_PING\n notify :server_ping, msg.params[0] # server wants the params back\n when CMD_ERROR\n notify :server_error\n\n # ----- nick-related -----\n when CMD_NICK\n @state[:nick] = msg.params[0] if msg.prefix[:nick] == @state[:nick]\n threaded_notify :nick_changed, msg.prefix[:nick], msg.params[0]\n when ERR_NICKNAMEINUSE\n # nickname errors are deterministic, that is, the client keeps track of the \n # state of attempted nick changes in @state, and the server responds to them\n # in order, so no additional info needs to be sent in the callback.\n # (this is tested)\n notify :nick_in_use\n when ERR_ERRONEUSNICKNAME\n notify :nick_invalid\n\n # ----- channel-related -----\n when CMD_JOIN\n threaded_notify :joined_channel, msg.user, msg.params[0]\n when CMD_PART\n threaded_notify :left_channel, msg.user, msg.params[0], msg.params[1]\n when CMD_QUIT\n threaded_notify :quit_server, msg.user, msg.params[0]\n when RPL_TOPIC # negative indices handle rfc and non-rfc commands\n threaded_notify :topic_changed, msg.params[-2], msg.params[-1], nil\n when CMD_TOPIC\n threaded_notify :topic_changed, msg.params[0], msg.params[1], msg.user\n when RPL_NAMREPLY\n @state[:scratch] ||= {}\n @state[:scratch][msg.params[-2]] ||= []\n # strip out leading mode characters: @, +, ~, etc.\n @state[:scratch][msg.params[-2]] += msg.params[-1].split.map { |name| name.gsub(/^[^a-zA-Z\\[\\]\\\\`_\\^{}\\|]/,'') }\n when RPL_ENDOFNAMES\n if @state[:scratch]\n threaded_notify :channel_name_list, msg.params[-2], ( @state[:scratch][msg.params[-2]] || [] )\n @state[:scratch].delete(msg.params[-2])\n else\n threaded_notify :channel_name_list, []\n end\n \n # ----- messaging -----\n when CMD_PRIVMSG\n if private?(msg)\n threaded_notify :private_message, msg.params[0], msg.params[1], msg.user\n else\n threaded_notify :channel_message, msg.params[0], msg.params[1], msg.user\n end\n when CMD_NOTICE\n if private?(msg)\n threaded_notify :private_notice, msg.params[0], msg.params[1], msg.user\n else\n threaded_notify :channel_notice, msg.params[0], msg.params[1], msg.user\n end\n\n end\n end",
"def internal; end",
"def get_callbacks\n {\n :config => Proc.new { |c| call_config(c) },\n :machine => Proc.new { |m| call_machine(m) },\n }\nend"
] | [
"0.79225594",
"0.79225594",
"0.72916436",
"0.72867084",
"0.7238036",
"0.72067416",
"0.698272",
"0.6414605",
"0.63994104",
"0.62620276",
"0.6211972",
"0.6207387",
"0.61792666",
"0.6110566",
"0.61039805",
"0.61039805",
"0.6072172",
"0.6072172",
"0.60457146",
"0.60457146",
"0.60075235",
"0.59986186",
"0.5978772",
"0.5960087",
"0.5960087",
"0.59513295",
"0.59511167",
"0.594885",
"0.59449077",
"0.5943555",
"0.5932201",
"0.5924261",
"0.59091794",
"0.59091794",
"0.59027684",
"0.58671725",
"0.5844364",
"0.58383507",
"0.5827943",
"0.5827943",
"0.5827943",
"0.58025444",
"0.5790329",
"0.5784188",
"0.578369",
"0.578369",
"0.5763974",
"0.5763974",
"0.5745568",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57386595",
"0.57205886",
"0.5717249",
"0.5717249",
"0.5714919",
"0.5707439",
"0.5704388",
"0.57019556",
"0.57019556",
"0.57019556",
"0.57019556",
"0.57019556",
"0.57019556",
"0.57019556",
"0.57019556",
"0.56971824",
"0.56910926",
"0.56838304",
"0.56821275",
"0.56655866",
"0.5664515",
"0.5659439",
"0.5659439",
"0.5638354",
"0.56291574",
"0.56156474",
"0.56156474",
"0.56128025",
"0.5609394",
"0.56054187",
"0.56031775",
"0.5601759",
"0.5594094",
"0.5582539",
"0.5582539",
"0.5580206",
"0.5572577",
"0.5572577",
"0.5568391",
"0.55676585",
"0.55672324",
"0.5546646",
"0.5534555",
"0.55335534",
"0.5532578"
] | 0.0 | -1 |
Write a method reverse_words that takes in a sentence string and returns the sentence with the order of the characters in each word reversed. Note that we need to reverse the order of characters in the words, do not reverse the order of words in the sentence. | def reverse_words(sentence)
new_sentence = []
sentence.split(" ").each do |word|
rev_word = ""
word.each_char {|char| rev_word = char + rev_word }
new_sentence << rev_word
end
new_sentence.join(" ")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reverse_words sentence\n sentence.split(\" \").map{ |x| x.reverse }.join(\" \")\nend",
"def reverse_words(sentence)\n\tsentence.split.map! { |word| word.reverse }.join(\" \")\nend",
"def reverse_words(sentence)\n\twords = sentence.split(' ')\n\twords.each do |word|\n\t\tword.reverse!\n\tend\n\treversed_sentence = words.join(\" \")\nend",
"def reverse_words(sentence)\n new_sentence = []\n reversed_sentence = []\n new_sentence = sentence.split(\" \")\n new_sentence.each do |word|\n new_word = word.reverse\n reversed_sentence << new_word\n end\n reversed_sentence.join(\" \")\nend",
"def reverse_words(sentence)\n\tnew_sentence = sentence.split\n\tnew_sentence.map {|x| x.reverse!}\n\treturn new_sentence.join(\" \")\nend",
"def reverse_words(sentence)\n if sentence != \"\"\n sent_array = sentence.split\n sent_array.each do |word|\n word.reverse!\n end\n sentence = sent_array.join(\" \")\n end\n sentence\nend",
"def reverse_words(sentence)\n sentence = reverse(sentence)\n words = sentence.split(/ /)\n words.map! { |word| reverse(word) }\n words.join(\" \")\nend",
"def reverse_words(sentence)\n temp_array = sentence.split(' ')\n temp_array.map! do |x| x.reverse end\n temp_array.join(' ')\nend",
"def reverse_sentence(sentence)\n reverse_string = reverse_string(sentence)\n \n reverse_words = reverse_string.split\n \n new_words = []\n \n reverse_words.each do |word|\n new_words << reverse_string(word)\n end\n \n return new_words.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect { |word| word.reverse }.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|x| x.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\n end",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\n end",
"def reverse_words(sentence)\n\t# improved readability by making this code into two separate lines.\n\tnew_sentence = sentence.split.map! do |word| \n\t\tword.reverse!\n\tend\n\t# removed an unnecessary \"return\"\n\tnew_sentence.join(\" \")\nend",
"def reverse_each_word(sentence)\n array_of_split_string = []\n reversed_words = []\n array_of_split_string = sentence.split\n array_of_split_string.collect {|words| reversed_words << words.reverse!}\n reversed_words.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split(\" \").collect{|x| x.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\n\tsentence.split.collect { |el| el.reverse }.join(\" \")\nend",
"def reverse_words(sentence)\n array = sentence.split(\" \")\n reversed_array = []\n array.each do |word|\n reversedWord = word.reverse\n reversed_array.push(reversedWord)\n end\n return reversed_array.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.map! do |word|\n word.reverse\n end\n .join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split.collect(&:reverse).join(' ')\nend",
"def reverse_each_word(sentence)\n sentence.split(\" \").collect do |word|\n word.reverse\n end\n .join(\" \")\nend",
"def reverse_each_word(sentence)\n backwards = []\n sentence_array = sentence.split(/ /)\n sentence_array.collect do |word|\n backwards << word.reverse\n end\n backwards.join(\" \")\nend",
"def reverse_words(sent)\n sent = sent.split(\" \")\n new=[]\n\n sent.each do |char|\n new << char.reverse\n end\n return new.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.reverse.split.reverse.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.reverse.split.reverse.join(\" \")\nend",
"def reverse_words(sentence)\r\n\tsent_arr = sentence.split(\" \")\r\n\tsent_arr.each do |word|\r\n\t\tsent_arr[sent_arr.index(word)] = word.reverse\r\n\tend\r\n\treturn sent_arr.join(\" \")\r\nend",
"def reverse_each_word(sentence)\n\treversed = Array.new\n\tsentence.split(\" \").map do |word|\n\t\treversed << word.reverse.to_s\n\tend\n\treversed.join(\" \")\nend",
"def reverse_words(words)\n word_array = words.split(\" \")\n word_array.map!{ |word| word.reverse }\n reverse_sentence = word_array.join(\" \")\nend",
"def reverse_each_word(sentence)\n reversed = []\n array = sentence.split(/ /)\n array.collect do |word|\n reversed << word.reverse\n end\n reversed.join(\" \")\nend",
"def reverse_sentence(my_sentence)\n if my_sentence != nil\n # Creates array of words and spaces\n words_array = words(my_sentence)\n\n # Revereses the order of the words\n length = words_array.length\n i = 0\n j = length - 1\n (length / 2).times do\n # selects word at smallest index that has not been swapped\n word_1 = words_array[i]\n # selects words at largest index that has not been swapped\n word_2 = words_array[j]\n \n words_array[i] = word_2\n words_array[j] = word_1\n \n i += 1\n j -= 1\n end\n \n reverse = words_array.join\n\n # Overrides original string with order of characters in reverse\n i = 0\n my_sentence.length.times do\n my_sentence[i] = reverse[i]\n i += 1\n end\n end\n\n return my_sentence\nend",
"def reverse_each_word(sentence)\n sentence_array = sentence.split(/ /) \n final_sentence = sentence_array.collect {|word| word.reverse}\n final_sentence.join(\" \")\nend",
"def reverse_words(sentence)\n arr_of_words = split_sentence(sentence)\n arr_of_reversed_words = reverse_array(arr_of_words)\n join_array(arr_of_reversed_words)\nend",
"def reverse_words(sent)\n split = sent.split(' ')\n reversed = []\n split.each do | word |\n reversed.push(word.reverse)\n end\n return reversed.join(' ')\nend",
"def reverse_each_word(sentence)\n sentence.reverse.split.reverse.join(\" \")\n \nend",
"def reverse_each_word(sentence)\n array = sentence.split()\n reversed_words = array.collect do |word|\n word.reverse\n end\n reversed_words.join(\" \")\nend",
"def reverse_each_word(sentence)\n\tsplit_up = sentence.split(\" \")\n\tsplit_up.collect do |word|\n\t\tword.reverse!\n\tend\n\tsplit_up.join(' ')\nend",
"def reverse_words(sentence)\n # sentence_array = sentence.split\n sentence.split.map(&:reverse)\n .join(' ')\n\nend",
"def reverse_each_word(sentence)\n word_array = []\n rev_array = []\n word_array = sentence.split\n word_array.collect {|word| rev_array << word.reverse}\n return rev_array.join(\" \")\nend",
"def reverse_words (string)\n array = string.split(\"\")\n array.reverse!\n final = array.join(\"\")\n return final\nend",
"def reverse_each_word(sentence)\n array = sentence.split\n array = array.collect {|word| word.reverse}\n array.join(\" \")\nend",
"def reverse_each_word(sentence)\n words = sentence.split(\" \")\n \n result = words.map do |word|\n word.reverse\n end\n result.join(\" \")\nend",
"def reverse_words(sent)\n new_sent = []\n sent.split.each { |word| new_sent << word.reverse}\n new_sent.join(' ')\nend",
"def reverseWords(str) \n str.reverse!\n str.split.map{|word| word.reverse!}.join(\" \")\nend",
"def reverse_each_word(sentence)\n new_array = sentence.split(\" \").collect do |word|\n word.reverse!\n end\n new_array.join(\" \")\nend",
"def reverse_words(sent)\n words = sent.split(\" \")\n reversed_sent = \"\"\n words.each {|word| reversed_sent = reversed_sent + \" \" + reverse_word(word)}\n return reversed_sent\nend",
"def word_reverse(sentence)\n sentence.split(\" \").reverse.join(\" \")\nend",
"def reverse_each_word (sentence)\n array_sentence = sentence.split()\n array_reversed = array_sentence.collect do |word|\n word.reverse\n end\n array_reversed.join(\" \")\nend",
"def reverse_each_word(string)\n sentence_array = string.split(\" \")\n reversed_sentence = []\n sentence_array.collect do |word|\n reversed_sentence << word.reverse\n end\n reversed_sentence.join(\" \")\nend",
"def reverse_words (string)\n string_array = string.split(\" \").to_a\n string_array.each {|word| word.reverse!}\n new_string = string_array.join(\" \")\nend",
"def reverse_words(string)\n string_array = string.split(\" \")\n words = string_array.each {|word| word.reverse!}.join(\" \")\nend",
"def reverse_words(s)\n s.split(' ').map(&:reverse!).join(' ')\nend",
"def reverse_each_word(sentence)\n words = sentence.split(\" \")\n\n new_words = words.collect do |word|\n word.reverse()\n end\n\n new_words.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence = sentence.split(' ')\n sentence.collect do |word|\n word.reverse!\n end\n sentence = sentence.join(' ')\nend",
"def reverse_sentence(my_sentence)\n return my_sentence unless my_sentence.class == String\n \n my_sentence = my_sentence.split(\"\")\n\n my_length = my_sentence.length\n reverse = []\n my_regex = /[^ ]/\n word = \"\"\n space = \"\"\n\n my_length.times do |i|\n #make a word if it's not a space or char\n if my_regex.match?(my_sentence[i])\n reverse << space if (reverse.length == 0 || word.length == 0)\n word += my_sentence[i]\n space = \"\"\n #make a space-word if it's a space\n else\n reverse << word if (reverse.length == 0 || space.length == 0)\n space += \" \"\n word = \"\"\n end\n end\n #make sure the last word/space makes it into the array\n reverse << word if reverse.last != word\n reverse << space if reverse.last != space\n\n #now reverse it!\n an_array = []\n reverse.length.times do |i|\n an_array << reverse.pop\n end\n\n #now put it into a string!\n my_sentence = \"\"\n an_array.map do |my_word|\n my_sentence += my_word\n end\n return my_sentence\nend",
"def reverse_words(words)\n reversed = words.split(' ') # split each sentence into array after each space\n reversed.map {|element| element.reverse!} # loop through and\n reversed.join(' ') # join the array elements with a space between each element\nend",
"def reverse_each_word(string)\n sentence_array = string.split(\" \")\n reversed_array = []\n sentence_array.collect do |string|\n reversed_array << string.reverse\n end\n reversed_array.join(\" \")\nend",
"def reverse_each_word(string)\n s = string.split(\" \")\n backward_sentence = s.collect do |word|\n word.reverse\n end\n backward_sentence.join(\" \")\nend",
"def reverse_words(str)\n words = str.split(\" \")\n words.each { |word| word.reverse! }\n return words.join(\" \")\nend",
"def reverse_words(s)\n output = []\n arr = s.split(\" \")\n \n arr.each do |word|\n output << word.reverse\n end\n \n output.join(\" \")\nend",
"def reverse_each_word(sentence)\n sentence.split(\" \").collect do |word|\n word.reverse\n end\n .join(\" \")\nend",
"def reverse_each_word(sentence)\n\tarray= sentence.split\n\tarray.map! do |word|\n\t\tword.reverse\n\tend\n\tarray.join(\" \")\nend",
"def reverse_sentence(words)\n words.split.reverse.join(' ')\nend",
"def reverse_sentence(sentence)\n return '' if sentence == ''\n list = sentence.split(' ').reverse\n str = '' \n list.each{|word| str << word + \" \"}\n str.strip!\nend",
"def reverse_words(string)\n p string.split.map!{|word| word.reverse}.join(\" \")\nend",
"def reverse_words(string)\n p string.split.map!{|word| word.reverse}.join(\" \")\nend",
"def reverse_each_word(sentence)\r\n new_sentence = sentence.split\r\n new_sentence.collect do |x|\r\n x.reverse!\r\n end\r\n new_sentence.join(\" \")\r\nend",
"def reverseWords(s)\n s.split(\" \").reverse.join(\" \")\nend",
"def reverse_words(str)\n str.split(/(\\s)/).each { |word| word.reverse! }.join(\"\")\nend",
"def reverse_sentence(string)\n array_of_words = string.split.reverse.join(\" \")\nend",
"def reverse_words(string)\n string_split = string.split\n word_count = string_split.length-1\n reversed_string = []\n string_split.each do | word |\n reversed_string << string_split[word_count]\n word_count -= 1\n end\n reversed_string.join(' ')\nend",
"def reverse_each_word(sentence)\n\tarray_of_words = sentence.split(' ')\n\tarray_of_words.collect do |word|\n word.reverse!\n\tend\n\tarray_of_words.join(\" \")\nend",
"def reverse_each_word(str)\n words=str.split(' ')\n sentence = []\n words.collect do |word|\n sentence << word.reverse\n end\n sentence.join(' ')\nend",
"def reverse_words(sent)\n words = sent.split(\" \")\n new_words = []\n words.each do |word|\n new_words << word.reverse\n end \n return new_words.join(\" \")\nend",
"def reverse_sentence(my_sentence)\n \n return nil if my_sentence == nil\n \n # reverses all characters in sentence\n i = 0\n j = my_sentence.length - 1\n \n while i < my_sentence.length / 2\n temp_i = my_sentence[i]\n temp_j = my_sentence[j]\n \n my_sentence[i] = temp_j\n my_sentence[j] = temp_i\n \n i += 1 \n j -= 1\n end\n \n \n # reverses any words in the sentence\n i = 0\n word_start = 0\n word_end = 0\n in_word = true\n \n while i < my_sentence.length\n if ( my_sentence[i] == \" \" || i == my_sentence.length - 1 ) && in_word == true\n \n word_end = i \n word_end -= 1 if my_sentence[i] == \" \"\n \n word_length = word_end - word_start + 1\n count = 0\n \n while count < word_length / 2\n temp_start = my_sentence[word_start]\n temp_end = my_sentence[word_end]\n \n my_sentence[word_start] = temp_end\n my_sentence[word_end] = temp_start\n \n word_start += 1\n word_end -= 1\n count += 1\n end\n \n in_word = false\n end\n \n if my_sentence[i] != \" \" && in_word == false\n word_start = i\n in_word = true\n end\n \n i += 1\n end\n \n return my_sentence\nend",
"def reverse_each_word(string)\n sentence = string.split()\n sentence.collect do |word|\n word.reverse!\n end\n sentence.join(\" \")\nend",
"def reverse_words(sent)\n words = sent.split(\" \")\n new_words = []\n words.each { |word| new_words << word.reverse }\n new_sent = new_words.join(\" \")\n return new_sent\nend",
"def reverse_words(string)\n string_array = string.split(\" \")\n string_array.each do |word|\n word.reverse!\n end\n p string_array\n reversed_sentance = \"\"\n string_array.each do |reverse_word|\n reversed_sentance += reverse_word + \" \"\n end\n p reversed_sentance.chop\nend",
"def reverse_words(string)\n words = string.split()\n words.each_with_index { |word, index| words[index] = word.reverse() }\n p words.join(\" \")\nend",
"def reverse_each_word(sentence)\narrayed = sentence.split (\" \")\nreversed = arrayed.collect {|i| i.reverse}\nreversed.join (\" \")\nend",
"def reverse_words(sent)\r\n reverse = []\r\n sent.split(\" \").each do |word|\r\n reverse << word.reverse\r\n end\r\n return reverse.join(\" \")\r\nend",
"def reverse_sentence(my_sentence)\n return nil if my_sentence.nil?\n\n array = my_sentence.split(/(\\S+)|(\\W)/)\n\n first_index = 0\n last_index = (array.length) - 1\n \n while first_index < last_index\n temp = array[first_index]\n array[first_index] = array[last_index]\n array[last_index] = temp\n first_index += 1\n last_index -= 1\n end\n\n array = array.join(\"\")\n\n array.length.times do |n|\n my_sentence[n] = array[n]\n end\n return my_sentence \nend",
"def reverse_words(sent)\n words = sent.split(\" \")\n new_words = []\n words.each { |word| new_words << word.reverse }\n new_sent = new_words.join(\" \")\n return new_sent\nend",
"def reverse_each_word(sentence)\n sentence.split.collect {|word| word.reverse}.join(' ')\n # new_sentence = []\n # new_sentence = sentence.split(' ')\n # new_sentence.collect {|word| word.reverse}\n # new_sentence.join(' ')\n end",
"def reverse_each_word(sentence2)\n sentence2.split.collect {|word| word.reverse}.join(\" \")\nend",
"def reverse_words(string)\n split_array = string.split(' ')\n results_array = split_array.map! do |word|\n word.reverse\n end\n results_array.join(' ')\nend",
"def reverse_sentence(sentence)\n words = sentence.split(' ')\n reversed_words = []\n\n i = 0\n while i < words.length\n reversed_words = [words[i]] + reversed_words\n i += 1\n end\n\n reversed_words.join(' ')\nend",
"def reverse_sentence(sentence)\n words = sentence.split(' ')\n reversed_words = []\n\n i = 0\n while i < words.length\n reversed_words = [words[i]] + reversed_words\n i += 1\n end\n\n reversed_words.join(' ')\nend",
"def reverse_words(string)\n string.split(' ').map{|word| word.reverse}.join(' ')\nend",
"def reverse_words(s)\n result = s.split(\" \").map do |word|\n word.reverse\n end\n\n result.join(\" \")\nend",
"def reverse_words(sentence)\n if sentence == nil || sentence.length < 2\n return sentence\n else\n index = 0\n start_index = 0\n finish_index = 0\n\n until finish_index > sentence.length\n if sentence[index] == \" \" || index + 1 == sentence.length\n a = start_index\n\n if sentence[index] == \" \"\n b = finish_index - 1\n elsif index + 1 == sentence.length\n b = finish_index\n end\n\n while a < b\n temp = sentence[b]\n sentence[b] = sentence[a]\n sentence[a] = temp\n a += 1\n b -= 1\n end\n index += 1\n finish_index += 1\n start_index = finish_index\n\n elsif sentence[index] != \" \"\n index += 1\n finish_index += 1\n end\n end\n\n return sentence\n\n end\nend",
"def reverse_words(str)\n str.split(\" \").map{ |i| i.reverse }.join(\" \")\nend",
"def reverse_words(string)\n word_array = string.split\n reversed_sentence = [ ]\n \n word_array.each do |word|\n reversed_sentence.push(word.reverse)\n end\n\n p reversed_sentence.join(\" \")\nend",
"def reverse_each_word(sentence)\n y = \"\"\n x =sentence.reverse.split(\" \")\n x = x.reverse\n x.collect do |word|\n y << word << \" \"\n end\n return y.chomp(\" \")\nend",
"def reverse_sentence(str)\n words_array = str.split.reverse.join(' ')\n words_array\nend",
"def reverse_words(my_words)\n # raise NotImplementedError\n return my_words if my_words.nil? || my_words.empty?\n\n i = 0 # starting point of sentence\n j = my_words.length # ending point of sentence\n\n while i < j\n while my_words[i] == \" \" && i < j\n i += 1\n end\n\n word_start = i # starting point of next word in sentence\n\n while my_words[i] != \" \" && i < j\n i += 1\n end\n\n word_end = i - 1 # end point of word in sentence\n\n word_reverse(my_words, word_start, word_end)\n end\n return\nend",
"def reverse_each_word(sentence)\n array = sentence.split(\" \")\n array_new = []\n array_new = array.collect do |element| \n element.reverse \n end\n array_new.join(\" \")\nend"
] | [
"0.84299076",
"0.841687",
"0.83713937",
"0.83434224",
"0.832155",
"0.8298649",
"0.82913035",
"0.82836974",
"0.8276433",
"0.8265693",
"0.82495147",
"0.8248501",
"0.8248501",
"0.82393116",
"0.82341933",
"0.8226766",
"0.8226766",
"0.8226766",
"0.8226766",
"0.8208294",
"0.8206099",
"0.81970644",
"0.8187964",
"0.81833375",
"0.81722933",
"0.8158842",
"0.8156985",
"0.81465477",
"0.81465477",
"0.8140196",
"0.8132561",
"0.8131938",
"0.81185097",
"0.8112094",
"0.80991703",
"0.80921847",
"0.8088752",
"0.80821764",
"0.8082054",
"0.8081034",
"0.8075869",
"0.80664647",
"0.80646574",
"0.8064406",
"0.8058211",
"0.80530405",
"0.80502844",
"0.804628",
"0.80360866",
"0.8035899",
"0.80330646",
"0.8028881",
"0.8027122",
"0.8003104",
"0.80003077",
"0.7999836",
"0.7996782",
"0.7991981",
"0.7979966",
"0.7972764",
"0.79697204",
"0.7967799",
"0.7967543",
"0.7966271",
"0.7963547",
"0.79611313",
"0.79600006",
"0.79592294",
"0.79592294",
"0.7952627",
"0.79495776",
"0.79458857",
"0.7940642",
"0.79317576",
"0.7923808",
"0.7919699",
"0.7918991",
"0.79161644",
"0.7913326",
"0.79004586",
"0.7893201",
"0.78907",
"0.7889548",
"0.7886208",
"0.7886174",
"0.78855276",
"0.7881369",
"0.7876743",
"0.7876424",
"0.78734887",
"0.78734887",
"0.78731006",
"0.78665954",
"0.7858499",
"0.78507096",
"0.7846662",
"0.78436303",
"0.78408986",
"0.78389853",
"0.7838583"
] | 0.80678165 | 41 |
Return command line arguments | def cli_arguments
%W(
--fail-fast
) + strategy.spec_files(mutation.subject)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def arguments\n @args ||= {}\n unless @args.size > 0\n ARGV.each_with_index do |arg, index|\n if arg.start_with?('-')\n if index + 1 < ARGV.size\n next_arg = ARGV[index + 1]\n if next_arg.start_with?('-') then\n @args.update(argument_present_or_direct(arg))\n else\n @args.update(arg => next_arg)\n end\n else\n @args.update(argument_present_or_direct(arg))\n end\n end\n end\n end\n @args\n end",
"def arguments\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n\n return args\n end",
"def argv; argline.split(/ +/) unless argline.nil?; end",
"def arguments()\r\n args = OpenStudio::Ruleset::OSArgumentVector.new\r\n\r\n return args\r\n end",
"def arguments()\r\n args = OpenStudio::Ruleset::OSArgumentVector.new\r\n\r\n return args\r\n end",
"def arguments()\r\n args = OpenStudio::Ruleset::OSArgumentVector.new\r\n \r\n return args\r\n end",
"def args\n return [] unless options[\"args\"]\n options[\"args\"].map do |options|\n Argument.new options\n end\n end",
"def getArguments\n\n\t# Parse the arguments\n\ttheArgs = { :clang => false,\n\t\t\t\t:rewrite => false,\n\t\t\t\t:help => false,\n\t\t\t\t:paths => [],\n\t\t\t\t:exclude => [] }\n\n\ttheParser = OptionParser.new do |opts|\n\t\topts.banner = \"Usage:\\n rn-format [--help] [--clang] [--rewrite] [--exclude=PATH] PATH [PATH...]\";\n\t\topts.separator \"\";\n\t\topts.separator \"Reformat any source files within the supplied paths,\";\n\t\topts.separator \"displaying the results to standard output.\";\n\t\topts.separator \"\";\n\t\topts.separator \"Options:\";\n\n\t\topts.on('--clang',\t\t\t\t\t\t'Show raw clang-format output') do\n\t\t\ttheArgs[:clang] = true;\n\t\tend\n\n\t\topts.on('--rewrite',\t\t\t\t\t'Rewrite files in-place') do\n\t\t\ttheArgs[:rewrite] = true;\n\t\tend\n\n\t\topts.on('--exclude=PATH',\t\t\t\t'Exclude a path') do |thePath|\n\t\t\ttheArgs[:exclude] << File.expand_path(thePath);\n\t\tend\n\n\t\topts.on('--help',\t\t\t\t\t\t'Show the help') do\n\t\t\ttheArgs[:help] = true;\n\t\tend\n\tend\n\n\ttheParser.parse!;\n\ttheArgs[:paths] = ARGV;\n\n\n\n\t# Show the help\n\tif (theArgs[:help] || theArgs[:paths].empty?)\n\t\tputs theParser.help();\n\t\texit(false);\n\tend\n\t\n\treturn theArgs;\n\nend",
"def arguments\n parser.arguments\n end",
"def command_arguments(options={})\n args = []\n args.concat(['--type', options[:type].to_s]) if options[:type]\n args.concat(['--line-break', options[:line_break].to_s]) if options[:line_break]\n args.concat(['--charset', options[:charset].to_s]) if options[:charset]\n\n if options[:type].to_s == 'js'\n args << '--nomunge' unless options[:munge]\n args << '--preserve-semi' if options[:preserve_semicolons]\n args << '--disable-optimizations' unless options[:optimize]\n end\n\n args\n end",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n # this measure does not require any user arguments, return an empty list\n return args\n end",
"def extract_arguments!\n return ARGV[0], nil, nil if ARGV.length == 1\n\n raise(ArgumentError, \"Usage: mixtape-bu SOURCE [CHANGES] [DEST]\") unless ARGV.length == 3\n\n ARGV.take(3)\nend",
"def arguments\n \"\"\n end",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n\n # this measure does not require any user arguments, return an empty list\n\n return args\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n # this measure does not require any user arguments, return an empty list\n\n return args\n end",
"def argv; end",
"def get_target_args\n\t\t\targs = ARGV.reject {|arg| arg =~ /^-/ || Rake::Task.task_defined?(arg) }\n\t\t\treturn args\n\t\tend",
"def arguments(workspace)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n return args\n end",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n\n # url of the city database\n city_db_url = OpenStudio::Measure::OSArgument.makeStringArgument('city_db_url', true)\n city_db_url.setDisplayName('City Database Url')\n city_db_url.setDescription('Url of the City Database')\n city_db_url.setDefaultValue('')\n args << city_db_url\n\n # project id to update\n project_id = OpenStudio::Measure::OSArgument.makeStringArgument('project_id', true)\n project_id.setDisplayName('Project ID')\n project_id.setDescription('Project ID to generate reports for.')\n project_id.setDefaultValue('0')\n args << project_id\n\n # datapoint id to update\n datapoint_id = OpenStudio::Measure::OSArgument.makeStringArgument('datapoint_id', true)\n datapoint_id.setDisplayName('Datapoint ID')\n datapoint_id.setDescription('Datapoint ID to generate reports for.')\n datapoint_id.setDefaultValue('0')\n args << datapoint_id\n\n return args\n end",
"def arguments(workspace)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n return args\n end",
"def arguments\n tailing_args = tailing_non_options\n\n if tailing_args.any? { |arg| arg[0,1] == '-' }\n tailing_args.unshift('--')\n end\n\n return leading_non_options + options + tailing_args\n end",
"def parse_arguments\n @arguments = ARGV.collect { |arg| arg.strip }\n @filename = Pathname.new(@arguments.first)\n end",
"def args(argv)\n Docopt.docopt(docopt, version: @version, argv:argv)\n end",
"def get_args( from: '', required: false, default_value: [], comment: '' )\n cmd_options = @options['command_line_options'][from]\n # Auto documentation of this option\n @doc.push \"Option to pass arguments from: #{from},\\tdefault_value :#{default_value},\\tcomment :#{comment}\"\n if cmd_options.class == Array\n cmd_options = cmd_options.join ' '\n cmd_options = \"\\\"#{cmd_options}\\\"\" # return with quotes\n else\n @errors.push \" Arguments option #{from} should be an Array. #{cmd_options.class} found\"\n end\n end",
"def arguments()\r\n args = OpenStudio::Ruleset::OSArgumentVector.new\r\n\r\n # todo - add bool arguments to decide what tables to generate, default all to true.\r\n\r\n return args\r\n end",
"def arguments()\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n # this measure will require arguments, but at this time, they are not known\n geometry_profile = OpenStudio::Ruleset::OSArgument::makeStringArgument('geometry_profile', true)\n geometry_profile.setDefaultValue(\"{}\")\n os_model = OpenStudio::Ruleset::OSArgument::makeStringArgument('os_model', true)\n os_model.setDefaultValue('multi-model mode')\n user_id = OpenStudio::Ruleset::OSArgument::makeStringArgument('user_id', true)\n user_id.setDefaultValue(\"00000000-0000-0000-0000-000000000000\")\n job_id = OpenStudio::Ruleset::OSArgument::makeStringArgument('job_id', true)\n #job_id.setDefaultValue(SecureRandom.uuid.to_s)\n ashrae_climate_zone = OpenStudio::Ruleset::OSArgument::makeStringArgument('ashrae_climate_zone', false)\n ashrae_climate_zone.setDefaultValue(\"-1\")\n building_type = OpenStudio::Ruleset::OSArgument::makeStringArgument('building_type', false)\n building_type.setDefaultValue(\"BadDefaultType\")\n\n args << geometry_profile\n args << os_model\n args << user_id\n args << job_id\n args << ashrae_climate_zone\n args << building_type\n\n return args\n end",
"def getOptions\n @options = Array.new\n # format of options argument\n optionsPattern = Regexp.new(/\\-[\\w]+/)\n # check to see if options are formatted correctly\n if optionsPattern.match(ARGV[0])\n # get each option and push them to an array \n # start at 1 to ignore - \n for i in 1..ARGV[0].length - 1\n @options.push(ARGV[0][i])\n end # -- end for loop to get options\n else\n abort(\"First argument needs to be an option.\\nExample:\\n\\t ruby OSQuery -i Blood_rune\")\n end # -- end valid options check\nend",
"def cmd(options={})\n arguments\n end",
"def args\n raw_args\n end",
"def arguments\n @arguments ||= Launchr::OrderedHash.new\n @arguments\n end",
"def args\n Mysh.parse_args(cooked_body)\n end",
"def args\n @args.args\n end",
"def arguments()\n args = OpenStudio::Measure::OSArgumentVector.new\n\n #make an argument for the frequency\n reporting_frequency_chs = OpenStudio::StringVector.new\n reporting_frequency_chs << \"Timestep\"\n reporting_frequency_chs << \"Hourly\"\n reporting_frequency_chs << \"Daily\"\n reporting_frequency_chs << \"Monthly\"\n reporting_frequency_chs << \"RunPeriod\"\n arg = OpenStudio::Measure::OSArgument::makeChoiceArgument('reporting_frequency', reporting_frequency_chs, true)\n arg.setDisplayName(\"Reporting Frequency\")\n arg.setDefaultValue(\"Hourly\")\n args << arg\n\n #make an argument for including optional output variables\n arg = OpenStudio::Measure::OSArgument::makeBoolArgument(\"inc_output_variables\", true)\n arg.setDisplayName(\"Include Output Variables\")\n arg.setDefaultValue(false)\n args << arg\n\n return args\n end",
"def read_arguments\n\tif (ARGV.length() < 2)\n\t\traise ArgumentError, \"Invalid number of arguments, \\n correct usage 'ruby ./661561-project-one.rb <input_file> <regression_type>'\"\n\tend\n\t\n\tfilename = ARGV[0]\n\tregression_type = ARGV[1]\n\n\tif !(VALID_REGRESSIONS.include? regression_type)\n\t\traise ArgumentError, 'Regression type is not valid.'\t\n\tend\n\n\treturn filename, regression_type\n\nend",
"def arguments\r\n args = OpenStudio::Ruleset::OSArgumentVector.new\r\n\r\n #make choice argument for facade\r\n choices = OpenStudio::StringVector.new\r\n choices << \"MessagePack\"\r\n choices << \"CSV\"\r\n choices << \"Both\"\r\n output_format = OpenStudio::Ruleset::OSArgument::makeChoiceArgument(\"output_format\", choices)\r\n output_format.setDisplayName(\"Output Format\")\r\n output_format.setDefaultValue(\"Both\")\r\n args << output_format\r\n\r\n args\r\n end",
"def extract_args(*args)\n options = args.extract_options!\n if options.length.positive?\n [args.pop, options.fetch(:keys) { [] }, options.fetch(:argv) { [] }]\n else\n keys, argv = args.shift(2)\n keys ||= []\n argv ||= []\n [args.pop, keys, argv]\n end\n end",
"def args (configLoc)\n conf = File.dirname(__FILE__) + \"/#{configLoc}\"\n args = Array.new\n args << \"--private_token=#{ENV['TOKEN']}\"\n args << \"--config=#{conf}\"\n return args\n end",
"def build_args\n if File.exist? build_info_file\n build_info = File.readlines build_info_file\n build_info = build_info.map {|x| x.strip }\n build_info.delete \"\"\n build_info\n else\n []\n end\n end",
"def text_from_args\n\t\t\treturn ARGV.join(' ').gsub(\"\\t\",'')\n\t\tend",
"def to_args\n args = [\n \"--name \\\"#{name}\\\"\",\n \"--version \\\"#{version}\\\"\",\n \"--user \\\"#{user}\\\"\",\n \"--group \\\"#{group}\\\"\",\n \"--iteration \\\"#{iteration}\\\"\",\n \"--homepage \\\"#{homepage}\\\"\",\n \"--home \\\"#{home}\\\"\",\n \"--architecture \\\"#{architecture}\\\"\",\n \"--description \\\"#{description}\\\"\",\n \"--maintainer \\\"#{maintainer}\\\"\",\n \"--vendor \\\"#{vendor}\\\"\"\n ]\n args.push \"--dependencies #{dependencies.map{|d| \"\\\"#{d}\\\"\"}.join}\" unless dependencies.nil? || dependencies.empty?\n args.push \"--build-dependencies #{build_dependencies.map{|d| \"\\\"#{d}\\\"\"}.join}\" unless build_dependencies.nil? || build_dependencies.empty?\n args.push \"--compile-cache-dir \\\"#{compile_cache_dir}\\\"\" unless compile_cache_dir.nil? || compile_cache_dir.empty?\n args.push \"--before-precompile \\\"#{before_precompile}\\\"\" unless before_precompile.nil? || before_precompile.empty?\n args.push \"--after-precompile \\\"#{after_precompile}\\\"\" unless after_precompile.nil? || after_precompile.empty?\n args.push \"--before-install \\\"#{before_install}\\\"\" unless before_install.nil? || before_install.empty?\n args.push \"--after-install \\\"#{after_install}\\\"\" unless after_install.nil? || after_install.empty?\n args.push \"--before-remove \\\"#{before_remove}\\\"\" unless before_remove.nil? || before_remove.empty?\n args.push \"--after-remove \\\"#{after_remove}\\\"\" unless after_remove.nil? || after_remove.empty?\n\n args.push \"--license \\\"#{license}\\\"\" unless license.nil? || license.empty?\n args.push \"--buildpack \\\"#{buildpack}\\\"\" unless buildpack.nil? || buildpack.empty?\n args.push \"--buildpack_list \\\"#{buildpack_list}\\\"\" unless buildpack_list.nil? || buildpack_list.empty?\n args.push \"--force-os \\\"#{force_os}\\\"\" unless force_os.nil? || force_os.empty?\n args.push \"--runner \\\"#{runner}\\\"\" unless runner.nil? || runner.empty?\n args.push \"--logrotate-frequency \\\"#{logrotate_frequency}\\\"\" unless logrotate_frequency.nil? || logrotate_frequency.empty?\n args.push \"--logrotate-backlog \\\"#{logrotate_backlog}\\\"\" unless logrotate_backlog.nil?\n args.push \"--env #{env.variables.map{|v| \"\\\"#{v}\\\"\"}.join(\" \")}\" if env.present?\n args.push \"--auto\" if auto\n args.push \"--verbose\" if verbose\n args.push \"--store-cache\" if store_cache\n args.push \"--debug\" if debug\n args.push \"--verify\" if verify\n args.push \"--no-clean\" if !clean\n args.push \"--no-edge\" if !edge\n args.push \"--tmpdir\" if !tmpdir\n args\n end",
"def args() return @args end",
"def parse_arguments\n @command_line_options = {}\n @config.insert 0, '<command_line>', @command_line_options\n\n @options = OptionParser.new do |opts|\n opts.on('-a', '--application STRING', 'set application name') do |application|\n @command_line_options[:application] = application\n end\n\n opts.on('-d', '--destination DIR', 'set destination directory', \"default: #{@config[:destination_directory]}\") do |directory|\n @command_line_options[:destination_directory] = directory\n end\n\n opts.on('-n', '--dryrun', 'do not switch') do\n @command_line_options[:dryrun] = true\n end\n\n opts.on('-V', '--version STRING', 'set application version to deploy') do |version|\n @command_line_options[:version] = version\n end\n end\n @options.parse!\n end",
"def spawn_args\n result = Array.new\n unless environment.empty?\n result << environment\n end\n result.concat(command_line)\n opts = Hash.new\n opts[:chdir] = directory.to_s unless directory.nil?\n opts[:pgroup] = pgroup unless pgroup.nil?\n opts[:umask] = umask unless umask.nil?\n opts[:unsetenv_others] = unsetenv_others unless unsetenv_others.nil?\n opts[:close_others] = close_others unless close_others.nil?\n rlimit.each do |key, value|\n opts[\"rlimit_#{key}\".to_sym] = value\n end\n redirection.each do |key, value|\n opts[key] = value\n end\n result << opts\n result\n end",
"def commander _args\n \"commander _args;\" \n end",
"def arguments(model = nil)\n args = OpenStudio::Measure::OSArgumentVector.new\n\n return args\n end",
"def parse_arguments\n options = {}\n parser = OptionParser.new do |opts|\n opts.on(\"-d\", \"--dir DIR\", \"absolute or relative path of the directory\") do |arg|\n options[:dir] = arg\n end\n\n opts.on(\"-p\", \"--pattern PATTERN\", \"search pattern - can contain asterisk(*) as wildcard\") do |arg|\n options[:pattern] = arg\n end\n end\n parser.parse!\n [options, parser]\nend",
"def args()\n #This is a stub, used for indexing\n end",
"def extract_build_args args # :nodoc:\n return [] unless offset = args.index('--')\n build_args = args.slice!(offset...args.length)\n build_args.shift\n build_args\n end",
"def parsed_args\n args = Options.new('binnacle - Simple Test and Infra automation Framework')\n args.verbose = 0\n args.runner = false\n args.result_json = ''\n\n opt_parser = OptionParser.new do |opts|\n opts.banner = 'Usage: binnacle [options] <testfile>'\n\n opts.on('-w', '--wide', 'Do not crop the task line') { args.wide = true }\n opts.on('-v', '--verbose', 'Verbose output') { args.verbose += 1 }\n opts.on('-r', '--runner', 'Run the tasks from a file (Internal use only)') { args.runner = true }\n opts.on('--results-json=FILE', 'Results JSON file') do |json_file|\n args.result_json = json_file\n end\n\n opts.on('-h', '--help', 'Prints this help') do\n puts opts\n exit\n end\n\n opts.on('--version', 'Show Version information') do\n puts \"Binnacle #{Binnacle::VERSION}\"\n exit\n end\n end\n\n opt_parser.parse!(ARGV)\n\n if ARGV.empty?\n warn 'Task file is not specified'\n exit EXIT_INVALID_ARGS\n end\n\n args.task_files = ARGV\n args\nend",
"def parse_args\n args_map = {:main => $0}\n ARGV.each { |arg|\n sep = arg.index('=')\n\n key = arg[0..sep-1].to_sym\n val = arg[sep+1..-1]\n\n if val == 'true'\n val = true\n end\n if val == 'false'\n val = false\n end\n args_map[key] = val\n }\n return args_map\n end",
"def parsed_argv\n Hash[ARGV.map { |arg| arg.split(\":\") }]\nend",
"def parse argv\n parse_args argv do |argv, remaining_args, arg|\n remaining_args << arg\n end\n end",
"def args\n arguments.is_a?(Array) ? arguments : YAML.load(arguments)\n end",
"def args(args = [])\n args << \"--out=#{::File.basename(r.path)}\"\n args << \"--dir=#{::File.dirname(r.path)}\"\n args << \"--checksum=sha-256=#{r.checksum}\" if r.checksum\n args << \"--header='#{r.header}'\" if r.header\n args << \"--check-certificate=#{r.check_cert}\"\n args << \"--file-allocation=falloc\"\n args << \"--max-connection-per-server=#{r.connections}\"\n args << r.source\n end",
"def preprocess_arguments_for_commands(args)\n # All arguments should be passed through to the atlantis command.\n if args.first == \"atlantis\"\n return args.slice!(1..-1)\n end\n if args.first == \"ssh\"\n arg_index = 1\n arg_index += 1 if Component.names.include?(args[1]) # Skip <component>, if it exists\n while arg_index < args.length\n break if args[arg_index][0] != \"-\"\n arg_index += 1 if args[arg_index] == \"-i\"\n arg_index += 1\n end\n return [] unless arg_index < args.length\n puts \"slicing #{arg_index.inspect}\"\n return args.slice!(arg_index..-1)\n end\n []\nend",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n\n # make an argument for the variable name\n variable_name = OpenStudio::Measure::OSArgument.makeStringArgument('variable_name', true)\n variable_name.setDisplayName('Enter Variable Name.')\n variable_name.setDescription('Valid values can be found in the eplusout.rdd file after a simulation is run.')\n args << variable_name\n\n # make an argument for the electric tariff\n reporting_frequency_chs = OpenStudio::StringVector.new\n reporting_frequency_chs << 'Detailed'\n reporting_frequency_chs << 'Timestep'\n reporting_frequency_chs << 'Zone Timestep'\n reporting_frequency_chs << 'Hourly'\n reporting_frequency_chs << 'Daily'\n reporting_frequency_chs << 'Monthly'\n reporting_frequency_chs << 'Runperiod'\n reporting_frequency = OpenStudio::Measure::OSArgument.makeChoiceArgument('reporting_frequency', reporting_frequency_chs, true)\n reporting_frequency.setDisplayName('Reporting Frequency.')\n reporting_frequency.setDefaultValue('Hourly')\n args << reporting_frequency\n\n # make an argument for the key_value\n key_value = OpenStudio::Measure::OSArgument.makeStringArgument('key_value', true)\n key_value.setDisplayName('Enter Key Name.')\n key_value.setDescription('Enter * for all objects or the full name of a specific object to.')\n key_value.setDefaultValue('*')\n args << key_value\n\n env = OpenStudio::Measure::OSArgument.makeStringArgument('env', true)\n env.setDisplayName('availableEnvPeriods')\n env.setDescription('availableEnvPeriods')\n env.setDefaultValue('RUN PERIOD 1')\n args << env\n\n args\n end",
"def getopt_args\n if short\n [[\"--#{name}\", \"-#{short}\", GetoptLong::REQUIRED_ARGUMENT]]\n else\n [[\"--#{name}\", GetoptLong::REQUIRED_ARGUMENT]]\n end\n end",
"def args\n @x.args.uniq\n end",
"def arguments()\n args = OpenStudio::Measure::OSArgumentVector.new\n\n # make an argument to toggle QAQC\n run_sim_settings_checks = OpenStudio::Measure::OSArgument.makeBoolArgument('run_sim_settings_checks', true)\n run_sim_settings_checks.setDisplayName('Run Checks')\n run_sim_settings_checks.setDescription('If set to true, will run the measure, which adds output variables and increases runtime.')\n run_sim_settings_checks.setDefaultValue(false)\n args << run_sim_settings_checks\n return args\n end",
"def to_shellwords\n argv = []\n argv << %[--autopath] if autopath?\n argv << %[--verbose] if verbose?\n argv << %[--format=\"#{format}\"] if format\n argv << %[--chdir=\"#{chdir}\"] if chdir\n argv << %[--tags=\"#{tags.join(';')}\"] unless tags.empty?\n argv << %[--match=\"#{match.join(';')}\"] unless match.empty?\n argv << %[--units=\"#{units.join(';')}\"] unless units.empty?\n argv << %[--loadpath=\"#{loadpath.join(';')}\"] unless loadpath.empty?\n argv << %[--requires=\"#{requires.join(';')}\"] unless requires.empty?\n argv << files.join(' ') unless files.empty?\n argv\n end",
"def normalize_args(args)\n return args if args.nil?\n args.pop if args.size > 0 && args.last == '--'\n args\n end",
"def arguments\n args = OpenStudio::Ruleset::OSArgumentVector.new\n\n # URL of the DEnCity server that will be posted to\n hostname = OpenStudio::Ruleset::OSArgument::makeStringArgument('hostname', true)\n hostname.setDisplayName('URL of the DEnCity Server')\n hostname.setDefaultValue('http://www.dencity.org')\n args << hostname\n\n # DEnCity server user id at hostname\n user_id = OpenStudio::Ruleset::OSArgument::makeStringArgument('user_id',true)\n user_id.setDisplayName('User ID for DEnCity Server')\n args << user_id\n\n # DEnCIty server user id's password\n auth_code = OpenStudio::Ruleset::OSArgument::makeStringArgument('auth_code', true)\n auth_code.setDisplayName('Authentication code for User ID on DEnCity server')\n args << auth_code\n\n # Building type for DEnCity's metadata\n building_type = OpenStudio::Ruleset::OSArgument::makeStringArgument('building_type', false)\n building_type.setDisplayName('Building type')\n args << building_type\n\n # HVAC system for DEnCity's metadata\n primary_hvac = OpenStudio::Ruleset::OSArgument::makeStringArgument('primary_hvac', false)\n primary_hvac.setDisplayName('Primary HVAC system in building')\n args << primary_hvac\n\n args\n\n end",
"def arrayize_arguments(args)\n # Go through trailing arguments and suck them in if they don't seem\n # to have an owner.\n array = []\n until args.empty? || args.first.match(/^-/)\n array << args.shift\n end\n array\n end",
"def get_command_line_argument\n if ARGV.empty?\n puts \"Usage: ruby lookup.rb <domain>\" \n exit\n end ARGV.first # get frst argument in commnad line\nend",
"def to_argv\n flags = []\n each do |f,v|\n m = f.to_s.size == 1 ? '-' : '--'\n case v\n when Array\n v.each{ |e| flags << \"#{m}#{f}='#{e}'\" }\n when true\n flags << \"#{m}#{f}\"\n when false, nil\n # nothing\n else\n flags << \"#{m}#{f}='#{v}'\"\n end\n end\n flags\n end",
"def command_line_options\n opts = GetoptLong.new(*ARG_SPEC)\n options = {}\n opts.each do |opt,arg|\n opt[0,2] = ''\n opt = opt.to_sym\n case opt\n when :help\n puts usage(\"[param ...]\")\n exit 0\n else\n options[opt] = arg ? arg : true\n end\n end\n options\nend",
"def read_from_cmdline\n require \"shellwords.rb\"\n words = Shellwords.shellwords(\n if not ARGV.empty?\n ARGV.join(' ')\n else\n STDERR.print \"(offline mode: enter name=value pairs on standard input)\\n\" if STDIN.tty?\n readlines.join(' ').gsub(/\\n/, '')\n end.gsub(/\\\\=/, '%3D').gsub(/\\\\&/, '%26'))\n\n if words.find{|x| x =~ /=/} then words.join('&') else words.join('+') end\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def _list_args args\n incl = []\n excl = []\n args.each do |e| \n if e[0] == '+'\n incl << e[1..-1]\n elsif e[0] == '-'\n excl << e[1..-1]\n else\n incl << e\n end\n end\n incl = nil if incl.empty?\n excl = nil if excl.empty?\n return incl, excl\n end",
"def parse_args\n args = {\n :stack_name => nil,\n :parameters => {},\n :interactive => false,\n :region => default_region,\n :profile => nil,\n :nopretty => false,\n :s3_bucket => nil,\n }\n ARGV.slice_before(/^--/).each do |name, value|\n case name\n when '--stack-name'\n args[:stack_name] = value\n when '--parameters'\n args[:parameters] = Hash[value.split(/;/).map { |pair| parts = pair.split(/=/, 2); [ parts[0], Parameter.new(parts[1]) ] }]\n when '--interactive'\n args[:interactive] = true\n when '--region'\n args[:region] = value\n when '--profile'\n args[:profile] = value\n when '--nopretty'\n args[:nopretty] = true\n when '--s3-bucket'\n args[:s3_bucket] = value\n end\n end\n\n args\nend",
"def extract_extra_args(images)\n if idx = ARGV.find_index(\"--\")\n extra_args = ARGV[idx + 1..-1]\n extra_args.each { |a| images.delete(a) }\n end\n\n [images, extra_args]\n end",
"def vars(argv = [])\n argv\n end",
"def read_from_cmdline\n require \"shellwords\"\n\n string = unless ARGV.empty?\n ARGV.join(' ')\n else\n if STDIN.tty?\n STDERR.print(\n %|(offline mode: enter name=value pairs on standard input)\\n|\n )\n end\n array = readlines rescue nil\n if not array.nil?\n array.join(' ').gsub(/\\n/n, '')\n else\n \"\"\n end\n end.gsub(/\\\\=/n, '%3D').gsub(/\\\\&/n, '%26')\n\n words = Shellwords.shellwords(string)\n\n if words.find{|x| /=/n.match(x) }\n words.join('&')\n else\n words.join('+')\n end\n end",
"def exiftool_args\n fail MultiExiftool::Error, 'No filenames.' if filenames.empty?\n cmd = []\n cmd << Reader.mandatory_args\n cmd << options_args\n cmd << tags_args\n cmd << filenames\n cmd.flatten\n end",
"def args\n @args\n end",
"def command_line\r\n ARGV.each do |arg|\r\n if arg == \"instructions\"\r\n instructions\r\n elsif arg == \"calculator\"\r\n ask_for_digits\r\n else\r\n \r\n end\r\n end\r\n \r\n end",
"def command_line_arguments(array)\n array.size.times do\n if array.include?('-nc')\n colour_changer(:white)\n array.delete('-nc')\n elsif array.any? { |x| ['-d1', '-d2', '-d3', '-d4'].include? x }\n key = (array[0])[1, 2].to_sym\n @difficulty = DIFFICULTY[key]\n @promptarr = prompt_select(key)\n @intro = false\n end\n end\n end",
"def optparse_args\n if short\n [\"--#{name}\", \"-#{short}\", desc, :REQUIRED]\n else\n [\"--#{name}\", desc, :REQUIRED]\n end\n end",
"def args\n defined_args = []\n self.class.init_args.keys.each do | arg |\n if arg_val = send(arg)\n defined_args << \":#{arg} => #{arg_val}\"\n end\n end\n defined_args.join(\",\\n\")\n end",
"def arguments\n args = OpenStudio::Measure::OSArgumentVector.new\n\n id = OpenStudio::Measure::OSArgument.makeStringArgument('feature_id', false)\n id.setDisplayName('Feature unique identifier')\n id.setDefaultValue('1')\n args << id\n\n name = OpenStudio::Measure::OSArgument.makeStringArgument('feature_name', false)\n name.setDisplayName('Feature scenario specific name')\n name.setDefaultValue('name')\n args << name\n\n feature_type = OpenStudio::Measure::OSArgument.makeStringArgument('feature_type', false)\n feature_type.setDisplayName('URBANopt Feature Type')\n feature_type.setDefaultValue('Building')\n args << feature_type\n\n feature_location = OpenStudio::Measure::OSArgument.makeStringArgument('feature_location', false)\n feature_location.setDisplayName('URBANopt Feature Location')\n feature_location.setDefaultValue('0')\n args << feature_location\n\n # make an argument for the frequency\n reporting_frequency_chs = OpenStudio::StringVector.new\n reporting_frequency_chs << 'Detailed'\n reporting_frequency_chs << 'Timestep'\n reporting_frequency_chs << 'Hourly'\n reporting_frequency_chs << 'Daily'\n # reporting_frequency_chs << 'Zone Timestep'\n reporting_frequency_chs << 'BillingPeriod' # match it to utility bill object\n ## Utility report here to report the start and end for each fueltype\n reporting_frequency_chs << 'Monthly'\n reporting_frequency_chs << 'Runperiod'\n\n reporting_frequency = OpenStudio::Measure::OSArgument.makeChoiceArgument('reporting_frequency', reporting_frequency_chs, true)\n reporting_frequency.setDisplayName('Reporting Frequency')\n reporting_frequency.setDescription('The frequency at which to report timeseries output data.')\n reporting_frequency.setDefaultValue('Timestep')\n args << reporting_frequency\n\n return args\n end",
"def get_arguments(cmd)\n (main_args, sub_command, sub_args) = split_command(cmd)\n int_sub_command = '' #sub_command\n until sub_args.empty? do\n (int_main_args, int_sub_command, sub_args) = split_command(sub_args)\n end\n return [sub_command, int_sub_command]\nend",
"def arguments()\n args = OpenStudio::Measure::OSArgumentVector.new\n \n #make an argument for the frequency\n reporting_frequency_chs = OpenStudio::StringVector.new\n reporting_frequency_chs << \"Detailed\"\n reporting_frequency_chs << \"Timestep\"\n reporting_frequency_chs << \"Hourly\"\n reporting_frequency_chs << \"Daily\"\n reporting_frequency_chs << \"Monthly\"\n reporting_frequency_chs << \"Runperiod\"\n reporting_frequency = OpenStudio::Ruleset::OSArgument::makeChoiceArgument('reporting_frequency', reporting_frequency_chs, true)\n reporting_frequency.setDisplayName(\"Reporting Frequency\")\n reporting_frequency.setDefaultValue(\"Hourly\")\n args << reporting_frequency\n \n # TODO: argument for subset of output meters\n \n return args\n end",
"def arguments(model)\n args = OpenStudio::Measure::OSArgumentVector.new\n\n # Make argument for zipcode\n zipcode = OpenStudio::Measure::OSArgument.makeIntegerArgument('zipcode', true)\n zipcode.setDisplayName('Zip Code for project')\n zipcode.setDescription('Enter valid us 5 digit zipcode')\n zipcode.setDefaultValue(80401)\n args << zipcode\n\n # make an argument for use_upstream_args\n use_upstream_args = OpenStudio::Measure::OSArgument.makeBoolArgument('use_upstream_args', true)\n use_upstream_args.setDisplayName('Use Upstream Argument Values')\n use_upstream_args.setDescription('When true this will look for arguments or registerValues in upstream measures that match arguments from this measure, and will use the value from the upstream measure in place of what is entered for this measure.')\n use_upstream_args.setDefaultValue(true)\n args << use_upstream_args\n\n return args\n end",
"def parse_args()\n opts = GetoptLong.new(\n ['--host', GetoptLong::OPTIONAL_ARGUMENT],\n ['--port', GetoptLong::OPTIONAL_ARGUMENT],\n ['--columns', GetoptLong::OPTIONAL_ARGUMENT],\n ['--index', GetoptLong::REQUIRED_ARGUMENT],\n ['--type', GetoptLong::REQUIRED_ARGUMENT]\n )\n\n opts.each do |opt, arg|\n case opt\n when '--host'\n @host = arg\n when '--port'\n @port = arg\n when '--columnns'\n @cols = arg.split(\",\")\n when '--index'\n @index = arg\n when '--type'\n @type = arg\n end\n end\n\n if @index.nil?\n STDERR.puts 'missing argument: --index'\n exit 1\n end\n\n if @type.nil?\n STDERR.puts 'missing argument: --type'\n exit 1\n end\n\n if ARGV.length != 1\n STDERR.puts 'Missing argument: file'\n exit 1\n end\n\n @file = ARGV.shift\nend",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n \n return args\n end",
"def options\n @options ||= args.dig(:options) || {}\n end",
"def exiftool_args\n fail MultiExiftool::Error, 'No filenames.' if filenames.empty?\n cmd = []\n cmd << Writer.mandatory_args\n cmd << options_args\n cmd << values_args\n cmd << filenames\n cmd.flatten\n end",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n return args\n end",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n return args\n end",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n return args\n end",
"def arguments(model)\n args = OpenStudio::Ruleset::OSArgumentVector.new\n return args\n end"
] | [
"0.731129",
"0.72981286",
"0.7187144",
"0.71825874",
"0.71825874",
"0.71825874",
"0.71825874",
"0.71825874",
"0.71825874",
"0.71825874",
"0.71793294",
"0.71138155",
"0.7080656",
"0.7080656",
"0.70412344",
"0.7027032",
"0.7008863",
"0.6998146",
"0.6962612",
"0.6871486",
"0.6844077",
"0.68380517",
"0.68367505",
"0.6822741",
"0.6814971",
"0.67593163",
"0.66567254",
"0.66489613",
"0.6634229",
"0.6614363",
"0.6586645",
"0.65773106",
"0.65528846",
"0.65411323",
"0.65066797",
"0.64958525",
"0.6468103",
"0.64587873",
"0.64403105",
"0.64271486",
"0.6400915",
"0.63954264",
"0.6394793",
"0.6372356",
"0.63537747",
"0.63429594",
"0.63407755",
"0.63296753",
"0.63010794",
"0.62926865",
"0.6281208",
"0.62604845",
"0.6254236",
"0.62448466",
"0.6243474",
"0.62343776",
"0.62298536",
"0.62058926",
"0.6201841",
"0.61943024",
"0.6187443",
"0.61814773",
"0.61703295",
"0.6153089",
"0.6150669",
"0.613473",
"0.61247754",
"0.6113441",
"0.6100385",
"0.60954523",
"0.6092528",
"0.6079425",
"0.60697633",
"0.6069462",
"0.60445195",
"0.6042948",
"0.604155",
"0.604155",
"0.60404754",
"0.6038954",
"0.60370386",
"0.60317135",
"0.6031099",
"0.6010736",
"0.600801",
"0.600628",
"0.600571",
"0.5988485",
"0.598623",
"0.59834045",
"0.5975682",
"0.59683216",
"0.59661025",
"0.595762",
"0.595762",
"0.595033",
"0.59381163",
"0.593799",
"0.593799",
"0.593799",
"0.593799"
] | 0.0 | -1 |
We are going to avoid indexing of README.md when there is also an index.md in the same directory, to keep behavior consistent with the display logic | def skip_index?(file)
if file.end_with? 'README.md'
dir = File.dirname file
File.exist? "#{dir}/index.md"
else
false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def buildReadMeIndex\n x = 0\n fileOutArr = Array.new\n\n # Build each Fn Command Card\n fileOutArr.push \"# Fn Command Reference\\n\\n\"\n @cmdListArr.each do |command|\n # Start markdown file\n fileOutArr.push \"[\" + command + \"](\" + @fileNameArr[x] + \".md\" + \") \\n\"\n x = x + 1 # Counter for syncing arrays\n end\n\n # Add Fn Version\n fileOutArr.push(\"\\n<sub>\" + @fnVersion + \"</sub>\")\n\n # Write README.md file to disk\n puts \"Writing: \" + \"README\" + \".md\"\n File.open(\"README.md\", \"w\") do |f|\n f.puts(fileOutArr)\n end\n\n end",
"def help\n path = Dir[src_path('README')].first || Dir[src_path('README.md')].first || Dir[src_path('README.markdown')].first\n if path\n File.read path\n end\n end",
"def show_readme\n readme 'lib/generators/cms/fortress/templates/README'\n end",
"def about\n require \"github/markup\"\n @readme = GitHub::Markup.render(\"README.md\", File.read(\"README.md\")).html_safe\n end",
"def show_readme\n readme \"README\" if behavior == :invoke\n end",
"def show_readme\n readme \"README\" if behavior == :invoke\n end",
"def show_readme\n readme \"README\" if behavior == :invoke\n end",
"def find_readme repo_dir\n files = Dir.entries(repo_dir).sort\n files.each { |f| return f if f.downcase.include? 'readme' }\n return ''\nend",
"def readme\n txt = File.open(Rails.root.join('README.rdoc'), 'r').read\n @readme = RDoc::Markup::ToHtml.new(RDoc::Options.new).convert(txt)\n end",
"def readme\n template 'README.md.erb', 'README.md'\n # This is here, because if --skip_tests is added, it skips running the gitignore function\n # => since all projects have a readme this should just overwrite the gitignore file\n copy_file 'dotfiles/gitignore', '.gitignore', force: true\n end",
"def test_index\n create_document(\"about.md\") # setup necessary data\n create_document(\"changes.txt\")\n\n get \"/\"\n \n assert_equal(200, last_response.status)\n assert_equal(\"text/html;charset=utf-8\", last_response[\"Content-Type\"])\n assert_includes(last_response.body, \"about.md\")\n assert_includes(last_response.body, \"changes.txt\")\n\n end",
"def find_readme\n\t\tfile = self.project_files.find {|file| file =~ /^README\\.(md|rdoc)$/ }\n\t\tif file\n\t\t\treturn Pathname( file )\n\t\telse\n\t\t\tself.prompt.warn \"No README found in the project files.\"\n\t\t\treturn DEFAULT_README_FILE\n\t\tend\n\tend",
"def generate_readme\n readme_path = File.join(Glitter::Application.config.repo_path, 'readme.md')\n File.write(\n readme_path,\n <<-RUBY.strip_heredoc)\n ## HEAD\n\n * bullet1\n * bullet2\n RUBY\n readme_path\n end",
"def process\n filename = \"index.markdown\"\n markdowns = {filename => []} \n state = :message\n message = [\"\\n\"]\n patch = []\n commit = nil\n (@gitlogp.split(\"\\n\")+[\"DONE\"]).each { |line|\n words=line.split\n if line.slice(0,1)==\" \" || words.length==0\n # commit messages start with 4 spaces, diff contents with 1 space\n if state==:message\n if words[0]==\"OUTPUT_FILE:\"\n filename = words[1]\n markdowns[filename] ||= []\n else\n message << \"#{line.slice(4..-1)}\"\n end\n else\n patch << \" #{line}\" if state==:patch\n end\n elsif words[0]==\"commit\" or words[0]==\"DONE\"\n if !commit.nil?\n # replace the short description line with a named link\n shortlog = message[2]\n message[2] = \"<a name='#{shortlog}'> </a>\"\n markdowns[filename] += message.map {|l|\n if l==\"SHOW_PATCH\"\n (patch+[\"{: .diff}\\n\"]).join(\"\\n\")\n else\n l\n end\n }\n series = tags[commit].slice(-2..-1)\n markdowns[filename] << \"\\n#{tags[commit]}: [view on github](#{@commit_link_base}#{commit}), [download #{series}-#{shortlog}.patch](#{@patch_link_base}/#{series}-#{shortlog}.patch)\\n{: .commit}\\n\"\n end\n \n message=[\"\\n\"]\n patch=[]\n\n commit = words[1]\n state = :message\n elsif [\"Author:\", \"Date:\", \"new\", \"index\", \"---\", \"+++\", '\\\\'].include?(words[0])\n # chomp\n elsif words[0]==\"diff\"\n state = :patch\n left = words[2].slice(2..-1)\n right = words[3].slice(2..-1)\n if left==right\n patch << \" ::: #{right}\"\n else\n patch << \" ::: #{left} -> #{right}\"\n end\n elsif words[0]==\"@@\"\n # git tries to put the function or class name after @@. This\n # works great for C diffs, but it only finds the class name in\n # Ruby, which is usually similar to the file name, Therefore\n # it's distracting cruft. Toss it.\n patch << \" #{words.slice(0,4).join(\" \")}\"\n else\n message << \"#{line.slice(4..-1)}\" if state==:message\n patch << \" #{line}\" if state==:patch \n end\n }\n output = {}\n markdowns.each do |fn, markdown|\n output[fn] = markdown.join(\"\\n\")\n Rails.logger.info(output[fn]) if respond_to? :Rails\n end\n return output\n end",
"def buildParentReadMeIndex\n x = 0\n fileOutArr = Array.new\n\n # Build list of reference pages\n fileOutArr.push \"### Fn Command Reference\\n\\n\"\n @cmdListArr.each do |command|\n # Add each command to output\n fileOutArr.push \"[\" + command + \"](ref/\" + @fileNameArr[x] + \".md\" + \") \\n\"\n x = x + 1 # Counter for syncing arrays\n end\n \n # Add Fn Version\n fileOutArr.push(\"\\n<sub>\" + @fnVersion + \"</sub>\")\n\n\n # Write REFLIST.md file to disk\n puts \"Writing: \" + \"REFLIST\" + \".md\"\n File.open(\"REFLIST.md\", \"w\") do |f|\n f.puts(fileOutArr)\n end\n\n end",
"def readme(path)\n log File.read(find_in_source_paths(path))\n end",
"def readme(path)\n log File.read(find_in_source_paths(path))\n end",
"def readme(path)\n log File.read(find_in_source_paths(path))\n end",
"def readme\n extract_tarball_readme do |extraction_errors, readme|\n if extraction_errors.any?\n Document.new\n else\n readme\n end\n end\n end",
"def add_readme(project)\n file_path = generate_readme\n file = [ActionDispatch::Http::UploadedFile.new(\n tempfile: File.new(file_path),\n filename: 'readme.md'\n )]\n project.add_images(\n 'master',\n nil,\n file,\n project.user.git_author_params\n )\n end",
"def build_mdlinks\n return unless options.Build_Markdown_Links\n\n puts_cyan \"Middlemac is creating `#{options.File_Markdown_Links}`.\"\n\n files_array = []\n out_array = []\n longest_shortcut = 0\n longest_path = 0\n\n Dir.glob(\"#{app.source}/Resources/**/*.erb\").each do |fileName|\n\n # Remove all file extensions and make a shortcut\n base_name = fileName\n while File.extname(base_name) != '' do\n base_name = File.basename( base_name, '.*' )\n end\n next if base_name.start_with?('_') # || base_name == 'index'\n\n if base_name == 'index'\n shortcut = \"[#{File.split(File.split(fileName)[0])[1]}_index]:\"\n\n else\n shortcut = \"[#{base_name}]:\"\n end\n\n # Make a fake absolute path\n path = Pathname.new(fileName).relative_path_from(Pathname.new(app.source))\n path = File::SEPARATOR + File.join(File.dirname(path), base_name) + '.html'\n\n # Get the title, if any\n metadata = YAML.load_file(fileName)\n title = (metadata.is_a?(Hash) && metadata.key?('title')) ? metadata['title'] : ''\n\n files_array << { :shortcut => shortcut, :path => path, :title => title }\n\n longest_shortcut = shortcut.length if shortcut.length > longest_shortcut\n longest_path = path.length if path.length > longest_path\n\n end\n\n files_array = files_array.sort_by { |key| [File.split(key[:path])[0], key[:path]] }\n files_array.uniq.each do |item|\n item[:shortcut] = \"%-#{longest_shortcut}.#{longest_shortcut}s\" % item[:shortcut]\n\n if item[:title].length == 0\n out_array << \"#{item[:shortcut]} #{item[:path]}\"\n else\n item[:path] = \"%-#{longest_path}.#{longest_path}s\" % item[:path]\n out_array << \"#{item[:shortcut]} #{item[:path]} \\\"#{item[:title]}\\\"\"\n end\n end\n\n File.open(options.File_Markdown_Links, 'w') { |f| out_array.each { |line| f.puts(line) } }\n\n end",
"def replace_readme(&block)\n remove_file 'README.doc'\n remove_file 'README.md'\n\n create_file 'README.md'\n append_file 'README.md', yield\nend",
"def directory_index\n end",
"def index\n return not_found unless request.format.to_sym.in?(%i[html md]) || !request.path.include?('.')\n\n redirect_to help_page_path(library: library,\n section: index_section,\n subsection: index_subsection,\n display_as: display_as)\n end",
"def markdown_filter_link_index(markdown)\n return markdown if index.nil? || index.empty?\n\n markdown << \"\\n\\n\"\n index.each { |ref| markdown << \"[#{ref.name}]: #{ref.url}\\n\" }\n markdown\n end",
"def markdown_cheatsheet\n File.read(Rails.root.join('markdown.md'))\n end",
"def update_readme\n snippet = <<~CODE\n <tr>\n <td>DATABASE_URL</td>\n <td>Yes</td>\n <td>\n `sqlite3:db/test.db` (for the test environment <em>only</em>)\n </td>\n <td>\n Connection URL to the database. The format varies according\n database adapter. Refer to the documentation for the adapter\n you're using for more information. Some examples:\n <dl>\n <dt>Sqlite3</dt>\n <dd>`sqlite3:db/development.db`</dd>\n <dt>PostgreSQL</dt>\n <dd>`postgresql://localhost/myapp_development?pool=5`</dd>\n </dl>\n </td>\n </tr>\n CODE\n\n insert_into_file('README.md', after: /<tbody>\\n/) do\n indent(snippet, 8)\n end\n end",
"def readme\n\tsystem('touch README.md')\n\tfile = File.open('README.md', 'w')\n\tfile.puts('this project is written in the ruby computer science language')\n\tfile.puts('more info on https://ruby-doc.org')\n\tfile.puts('')\n\tfile.puts('r e g a r d s')\n\tfile.puts('')\n\tfile.puts('j')\n\tfile.close\nend",
"def create_readme()\n file=File.open(\"README.md\", \"w\")\n file.puts('#Ruby app')\n file.puts(\"***\")\n file.puts(\"##README automatically generated\")\n file.puts(\"In this repo you can found :\")\n file.puts(\"* Directory lib'\")\n file.puts(\"* Directory spec'\")\n file.puts(\"* Gemfile'\")\n file.puts('***')\n file.puts(\"##A list of Gems used within the project:\")\n file.puts('gem rubocop')\n file.puts('gem pry')\n file.puts('gem dotenv')\n file.puts('gem rspec')\n file.close\nend",
"def load_old_index\n file = File.open('/home/matt/Documents/programming/ruby/dmsw/index.html', 'rb')\n html = file.read.chomp\n file.close\n return html\nend",
"def fs_to_doc_path(fs_path)\n fs_path.gsub /(README)?\\.md$/, \"\"\n end",
"def readme(builder)\n file = builder.repo.root.children.find do |val|\n val =~ /readme/i\n end\n\n return unless file\n file.safe_copy(builder.repo.cache_dir, {\n :root => file.parent\n })\n end",
"def title\n if file =~ /README.md/\n result = File.basename File.dirname(file)\n else\n result = File.basename(file,'.md')\n end\n result.tr '-', ' '\n end",
"def doc( path )\n get \"#{path}.html\" do\n password_protected!\n \n endpoint = path.split('/').last\n \n template_path = File.join( VIEW_PATH, \"#{endpoint}.md\" )\n rendered = GitHub::Markup.render( template_path )\n\n erb :layout, :locals => { :content => rendered }\n end\nend",
"def doc_file_path\n \"#{@structure[:working_path]}/docs/#{@structure[:full_relation_name]}.md\"\n end",
"def show_documentation req, res\n store, path = documentation_source req.path\n\n if_modified_since req, res, store.cache_path\n\n generator = generator_for store\n\n case path\n when nil, '', 'index.html' then\n res.body = generator.generate_index\n when 'table_of_contents.html' then\n res.body = generator.generate_table_of_contents\n when 'js/search_index.js' then\n documentation_search store, generator, req, res\n else\n documentation_page store, generator, path, req, res\n end\n ensure\n res.content_type ||= 'text/html'\n end",
"def open_index_file\n end",
"def create_docs\n directory 'templates/docs', 'docs'\nend",
"def erb_path\n \"#{File.dirname(__FILE__)}/markdown_doc.erb\"\n end",
"def docs\n options.verbose? ? @@log.level = Logger::DEBUG : @@log.level = Logger::ERROR\n repo_parent_dir = File.expand_path(\"#{File.dirname(__FILE__)}/../../\") \n remove_dir \"#{repo_parent_dir}/origin-docs\"\n empty_directory \"#{repo_parent_dir}/origin-docs\"\n \n unless File.exist?(\"/tmp/yard-js\")\n run \"git clone git://github.com/lsegal/yard-js /tmp/yard-js\"\n inside(\"/tmp/yard-js\") { run \"bundle install\" }\n end\n \n inside(repo_parent_dir) do\n doc_files = Dir[\"#{repo_parent_dir}/origin-server/documentation/*.md\"].join(\",\")\n inside(\"origin-server/documentation\") { run \"yardoc --markup=markdown --output-dir '#{repo_parent_dir}/origin-docs' --files #{doc_files}\" }\n run %{yardoc --output-dir '#{repo_parent_dir}/origin-docs/broker' --main origin-server/documentation/broker.md --private --protected --exclude test \\\n #{Dir[\"origin-server/broker/**/*.rb\"].join(' ')} \\\n #{Dir[\"origin-server/controller/**/*.rb\"].join(' ')}}\n run %{yardoc --output-dir '#{repo_parent_dir}/origin-docs/rest_api' --main origin-server/documentation/rest_api.md --api REST \\\n #{Dir[\"origin-server/controller/**/*.rb\"].join(' ')}}\n run \"yardoc --output-dir '#{repo_parent_dir}/origin-docs/broker_models' --main origin-server/documentation/broker_models.md --private --protected --api model #{Dir[\"origin-server/controller/**/*.rb\"].join(' ')}\"\n run \"yardoc --output-dir '#{repo_parent_dir}/origin-docs/node' --main origin-server/documentation/node.md --private --protected --exclude test #{Dir[\"origin-server/node/**/*.rb\"].join(' ')}\"\n run \"yardoc --output-dir '#{repo_parent_dir}/origin-docs/common' --main origin-server/documentation/common.md --private --protected --exclude test #{Dir[\"origin-server/common/**/*.rb\"].join(' ')}\"\n run \"yardoc --output-dir '#{repo_parent_dir}/origin-docs/build-tools' --main origin-dev-tools/README.md --private --protected --exclude test #{Dir[\"origin-dev-tools/build/*\"].join(' ')}\"\n end\n end",
"def doc_to_fs_path(doc_path)\n if doc_path.end_with?(\"/\") || doc_path == \"\"\n \"#{doc_path}README.md\"\n else\n \"#{doc_path}.md\"\n end\n end",
"def contents()\n html = Nokogiri::HTML(@markdown_document.to_html())\n\n # Fixup refs to other markdown documents\n html.css(\"a\").each do |anchor|\n anchor[\"href\"] = anchor[\"href\"].sub(%r{\\.md$}, \".html\")\n end\n\n # Since we transform device-specific $device/README.md pages into\n # discrete $device.html, we need to fixup cross-linking into its namespace\n # This could be generalized some more, to be fixed once we have other internal links to mismatched README.md/index.html locations.\n if File.dirname(relative_output) == \"devices\"\n html.css(\"a\").each do |anchor|\n if anchor[\"href\"].match(%r{\\.\\./[^\\.]+$})\n anchor[\"href\"] = anchor[\"href\"].sub(%r{\\.\\./}, \"devices/\") + \".html\"\n end\n end\n end\n\n # Since Nokogiri produces a complete document from our fragment, we\n # have to pick only what's in the body; so strip the body added tags and higher-up tags.\n html\n .at_css(\"body\").to_s()\n .sub(%r{^<body>}, \"\").sub(%r{</body>$}, \"\")\n end",
"def show_link(str)\n cfg = @@cfg\n str.gsub!(' ','-')\n str << '.md'\n show_doc(cfg, str)\n end",
"def render_md(site, readme)\n begin\n mkconverter = site.getConverterImpl(Jekyll::Converters::Markdown)\n readme.gsub! \"```\",\"\\n```\"\n readme.gsub! '```shell','```bash'\n return mkconverter.convert(readme)\n rescue\n return 'Could not convert readme.'\n end\nend",
"def markdown\n return if changed_markdown_files.empty?\n\n output = `mdl #{changed_markdown_files.join(' ')}`\n return if output&.empty?\n\n heading('Markdown Linter', output)\n end",
"def contstruct_readme\n config = template.config\n\n s = []\n s << \"# %s - %s\" % [config[:name], config[:summary]]\n s << \"## SYNOPSIS\"\n s << Array(usage).join(\"\\n\")\n s << \"## DESCRIPTION\"\n s << config[:description]\n s << \"## COPYRIGHT\"\n s << config[:copyright]\n s.join(\"\\n\\n\")\n end",
"def extract_homepage\n\t\treturn fail_extraction( :homepage, \"no README\" ) unless self.readme\n\n\t\tlist = self.readme.parts.find {|part| RDoc::Markup::List === part && part.type == :NOTE } or\n\t\t\treturn fail_extraction(:homepage, \"No NOTE list\")\n\t\titem = list.items.find {|item| item.label.include?('home') } or\n\t\t\treturn fail_extraction(:homepage, \"No `home` item\")\n\n\t\treturn item.parts.first.text\n\tend",
"def docs = require_relative 'scaffold/docs'",
"def visit(dir)\n # Create the directory.\n Dir.mkdir(target_path(dir))\n\n # Collect links to put on index later.\n links = []\n\n # Convert .md files to .html files.\n render_source_files = Dir.glob(File.join(dir, \"*.md\"))\n posts = render_source_files.collect{|source| Post.load_file(source)}\n\n posts_with_date = posts.select{|post| not post.date.nil?}.sort_by{|post| post.date}\n unless posts_with_date.empty?\n posts_with_date[0..-2].each_with_index do |post, i|\n later_post = posts_with_date[i+1]\n post.next_post = later_post\n later_post.prev_post = post\n end\n end\n\n posts.each do |post|\n rendered_post = POST_TEMPLATE.result(post.get_binding)\n page = Page.new(title: post.title, body: rendered_post)\n result = DEFAULT_TEMPLATE.result(page.get_binding)\n f = File.open(target_path(post.source), \"w\")\n f.write(result)\n\n links << Link.new(\n url: post.url,\n title: post.title,\n date: post.date,\n )\n end\n\n # Copy other files.\n entries = Dir.glob(File.join(dir, \"*\"))\n source_subdirs = entries.select{|e| File.directory?(e)}\n mirror_source_files = entries - render_source_files - source_subdirs\n\n mirror_source_files.each do |source|\n FileUtils.cp(source, target_path(source))\n links << Link.new(url: File.basename(target_path(source)))\n end\n\n # Recursively visit subdirectories.\n source_subdirs.each do |subdir|\n visit(subdir)\n links << Link.new(url: File.basename(target_path(subdir)))\n end\n\n # Create index if it's not already there.\n index_target_path = File.join(target_path(dir), \"index.html\")\n unless File.exist?(index_target_path)\n index = Index.new(\n links: links,\n dirname: File.join(target_url(target_path(dir)), \"/\"),\n )\n result = INDEX_TEMPLATE.result(index.get_binding)\n page = Page.new(title: index.dirname, body: result)\n result = DEFAULT_TEMPLATE.result(page.get_binding)\n f = File.open(index_target_path, \"w\")\n f.write(result)\n end\nend",
"def to_md\n @md ||= [\n \"[🔙 🏡](#{project_readme_path})\",\n '',\n \"# #{title} (#{year})\",\n '',\n \"By the #{adjective} [#{author}](#{homepage})\",\n '',\n '## Links:',\n '',\n \"- [Purchase #{title}](#{purchase})\",\n '',\n '## Chapter Notes:',\n ''\n ].concat(section_md).concat(image_md).join \"\\n\"\n end",
"def render\n markdown = ReadmeTemplate.new(\n metadata: @metadata, tasks: @tasks, credit: @credit\n )\n template_path = File.join(\n File.dirname(File.expand_path(__FILE__)),\n '../../templates/readme.md.erb'\n )\n readme = markdown.render(File.read(template_path))\n File.open(\"#{@cookbook}/README.md\", 'w') { |file| file.write(readme) }\n end",
"def traverse_nav_markdown(node)\n\t\n\t\t# traverse subfolders, go deep\n\t\tif node_has_children(node)\n\t\t\t node.children.items.each_with_index do |child|\n\n\t\t\t\t items = traverse_nav_markdown(child)\n\t\t\t\t child.children = Map.new unless child.children?\n\t\t\t\t child.children.count = 0 unless child.children.count?\n\t\t\t\t child.children.items = [] unless child.children.items?\n\t\t \n\t\t\t\t child.children.count = items.size\n\t\t\t\t child.children.items = items\t\t\t\t\n\n\t\t\t end\n\t\tend\n\t\n\t\tnode_list = nil\n\t\tif node.children? and node.children.items?\n\t\t\tnode_list = node.children.items\n\t\tend\n\t\n\t\tmarkdowns = Dir.glob(\"#{node.source_path}/*.markdown\")\n\t\n\t\t# if we are at the root node (content source), don't process markdowns here (home.markdown handled separately, special)\n\t\tmarkdowns = [] if node.nav_level == 0\n\t\n\n\t\tif markdowns.size > 0 and node.nav_level > 0\n\n\t\t\t#puts\n\t\t\t#puts \"#{node.source} - #{node.children?}\"\n\t\t\tnode.children = Map.new unless node.children?\n\t\t\tnode.children.count = 0 unless node.children.count?\n\t\t\tnode.children.items = [] unless node.children.items?\n\t\t\t#puts \"#{node.source} - #{node.children?} - #{node.children.count?}\"\n\t\t\n\t\t\tnode_list = node.children.items\n\t\t\t\n\t\t\tmarkdowns.each do |md|\t\t\t\t\t\t\n\t\t\t\tsource = md.gsub(/#{node.source_path}\\//, \"\").gsub(/.markdown/, \"\")\n\t\t\t\n\t\t\t\tis_cbdoc_special_file = source.start_with? \"aaab-\"\n\t\t\t\n\t\t\t\tunless is_cbdoc_special_file\t\t\t\t\n\t\t\t\t\n\t\t\t\t\tif node.link?\n\t\t\t\t\t\tlink = node.link + \"/\" + source \n\t\t\t\t\telse\t\t \n\t\t\t\t\t\tnode.link = \"undefined\"\n\t\t\t\t\t\tputs node.nav_type\n\t\t\t\t\t\texit\n\t\t\t\t\tend\n\t\t\t\t\n\t\t\t\t\tsource_path = node.source_path + \"/\" + source\t\t\t \n\n\t\t\t\t\tis_markdown_and_folder = (File.exist?(\"#{source_path}\") && File.directory?(\"#{source_path}\"))\n\t\t\t\n\t\t\t\t\tunless is_markdown_and_folder\t\t \n\t\t\t\t\t\n\t\t\t\t\t\tfull_link = (node.link.start_with?(\"/#{CONTENT_LINK_PREFIX}/\") ?\tlink : \"/#{CONTENT_LINK_PREFIX}/\" + link )\n\t\t\t\t\t\tparent_path = node.source_path\n\t\t\t\t\t\tparent_full_link = (node.link.start_with?(\"/#{CONTENT_LINK_PREFIX}/\") ?\t node.link : \"/#{CONTENT_LINK_PREFIX}/\" + node.link )\n\t\t\t\t\n\t\t\t\t\t\titem = Map.new({\n\t\t\t\t\t\t\tdoctype: \"nav\",\n\t\t\t\t\t\t\tsubtype: \"nav_\" + (node.nav_level + 1).to_s,\n\t\t\t\t\t\t\tnav_type: \"markdown\",\n\t\t\t\t\t\t\tnav_level: node.nav_level + 1,\t\t\t\t\n\t\t\t\t\t\t\tnav_order: 9000,\n\t\t\t\t\t\t\tnav_title: process_navigation_name(source),\n\t\t\t\t\t\t\tsource: source,\n\t\t\t\t\t\t\tlink: link,\n\t\t\t\t\t\t\tfull_link: full_link,\n\t\t\t\t\t\t\tparent: node.source,\n\t\t\t\t\t\t\tparent_nav_title: node.nav_title,\n\t\t\t\t\t\t\tparent_link: node.link,\n\t\t\t\t\t\t\tparent_full_link: parent_full_link,\n\t\t\t\t\t\t\tparent_path: parent_path,\n\t\t\t\t\t\t\tsource_path: source_path,\n\t\t\t\t\t\t\tancestors: [],\n\t\t\t\t\t\t\tancestors_links: [],\n\t\t\t\t\t\t\tsiblings: [],\n\t\t\t\t\t\t\tsiblings_links: [],\n\t\t\t\t\t\t\tdescendants: [],\n\t\t\t\t\t\t\tdescendants_links: []\t\t\t\t \n\t\t\t\t\t\t})\n\t\t\t\t\t\n\t\t\t\t\t\tnode_list << item \n\t\t\t\t\tend\t\t \n\t\t\t\tend\n\t\t\tend\t\t \n\t\tend\n\t\n\t\t#ap node_list\n\t\n\t\tnode_list\n\tend",
"def installed_docs\n extra_counter = 0\n ri_paths.map do |path, type|\n store = RDoc::Store.new path, type\n exists = File.exist? store.cache_path\n\n case type\n when :gem then\n gem_path = path[%r%/([^/]*)/ri$%, 1]\n [gem_path, \"#{gem_path}/\", exists, type, path]\n when :system then\n ['Ruby Documentation', 'ruby/', exists, type, path]\n when :site then\n ['Site Documentation', 'site/', exists, type, path]\n when :home then\n ['Home Documentation', 'home/', exists, type, path]\n when :extra then\n extra_counter += 1\n store.load_cache if exists\n title = store.title || \"Extra Documentation\"\n [title, \"extra-#{extra_counter}/\", exists, type, path]\n end\n end\n end",
"def run(content, params={})\n markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML, autolink: true, space_after_headers: true)\n page_text = extract_text(markdown.render(item.raw_content))\n title = item[:title] || item.identifier\n file_name = item.identifier.to_s.gsub(/\\//,'_')\n puts \"Indexing page: #{@item.identifier} to #{@tmp_index}/#{file_name}.idx\"\n unless Dir.exists?(@tmp_index)\n Dir.mkdir(@tmp_index)\n end\n\n\n\n idx_file_name = \"#{@tmp_index}/#{file_name}.idx\"\n if File.exists?(idx_file_name)\n File.delete(idx_file_name)\n end\n File.open(idx_file_name,\"w+\") do |file|\n file.write({title: title, text: page_text, tags: \"api\", loc: @item.path }.to_json)\n end\n content\n end",
"def ending\n if File.exist?('CREDITS.md')\n @io.puts IO.read('CREDITS.md')\n @io.puts\n end\n\n if File.exist?('AUTHORS.md')\n @io.puts IO.read('AUTHORS.md')\n @io.puts\n end\n\n if File.exist?('LICENSE.md')\n @io.puts IO.read('LICENSE.md')\n @io.puts\n end\n @io.puts\n @io.puts \"Documentation generated #{Time.now.strftime('%Y-%m-%d %H:%M')}\"\n @io.puts\n @io.close\n end",
"def what_it_does() \"Generate javadoc to '#{@name}' folder\" end",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def list\n\t\tfiles.map! { |filename|\n\t\t\t{:title => file_to_pagename(filename), :link => filename.chomp(\".md\")}\n\t\t}\n\tend",
"def path\n @absolute_path.sub(%r{^#{Slimdown.config.location}/(.*)\\.md}, '\\1')\n end",
"def gen_main_index\n template = RDoc::TemplatePage.new @template::INDEX\n\n open 'index.html', 'w' do |f|\n classes = @classes.sort.map { |klass| klass.value_hash }\n\n values = {\n 'main_page' => @main_page,\n 'initial_page' => main_url,\n 'style_url' => style_url('', @options.css),\n 'title' => CGI.escapeHTML(@options.title),\n 'charset' => @options.charset,\n 'classes' => classes,\n }\n\n values['inline_source'] = @options.inline_source\n\n template.write_html_on f, values\n end\n end",
"def description_section\n section_of( 'README.md', 'DESCRIPTION')\n end",
"def pathSourceDoc\n\t\"./documentation/\"\nend",
"def help_text\n build_html do\n p <<P1\nThis page is a simple presentation of the paths that match the file\nthat was searched for an the fileset that the file was shipped in.\nP1\n end\n end",
"def support_rdoc_document_file!\n IO.read(\".document\").gsub(/^[ \\t]*#.+/m, '').split(/\\s+/)\n rescue Errno::ENOENT\n []\n end",
"def documentation\n\troot = settings.root + '/doc'\n\tcontent = File.open(\"#{root}/#{@user.default_locale}.textile\", 'r').read()\n\tRedCloth.new(content).to_html\nend",
"def html_markup_asciidoc(text); end",
"def generate_index\n setup\n\n template_file = @template_dir + 'index.rhtml'\n return unless template_file.exist?\n\n debug_msg \"Rendering the index page...\"\n\n out_file = @base_dir + @options.op_dir + 'index.html'\n rel_prefix = @outputdir.relative_path_from out_file.dirname\n search_index_rel_prefix = rel_prefix\n search_index_rel_prefix += @asset_rel_path if @file_output\n\n asset_rel_prefix = rel_prefix + @asset_rel_path\n\n @title = @options.title\n\n render_template template_file, out_file do |io|\n here = binding\n # suppress 1.9.3 warning\n here.local_variable_set(:asset_rel_prefix, asset_rel_prefix)\n here\n end\n rescue => e\n error = RDoc::Error.new \\\n \"error generating index.html: #{e.message} (#{e.class})\"\n error.set_backtrace e.backtrace\n\n raise error\n end",
"def croucher_index_file\n nil\n end",
"def rearrange_docs!; end",
"def parse_readme\n\t\treturn nil unless self.readme_file.readable?\n\n\t\tcase self.readme_file.extname\n\t\twhen '.md'\n\t\t\treturn RDoc::Markdown.parse( self.readme_file.read )\n\t\twhen '.rdoc'\n\t\t\treturn RDoc::Markup.parse( self.readme_file.read )\n\t\telse\n\t\t\traise \"Can't parse %s: unhandled format %p\" % [ self.readme_file, README_FILE.extname ]\n\t\tend\n\tend",
"def document(file_name)\n origin = File.open(\"#{file_name}\",'r')\n docs = File.open(\"docs/doc-#{file_name.split('/')[-1]}.html\",'w')\n comment = true\n script = Script.new(file_name)\n \n # Need to refactor this into some form of a rules engine. It's getting a bit hairy\n str = origin.each_line.inject(\"\") do |str, line|\n # Skip if it's a shebang\n if line =~ /^ *#!/\n str << \"\"\n \n # Blank Comment line, insert newline\n elsif line =~ /^ *# *$/\n comment = true\n str << \"\\n\"\n \n # Evaluate as text if it's a comment with spaces prior\n elsif line =~ /^ *#/\n str << \"\\n\" unless comment \n comment = true\n \n # Find the meta-information in the comments and harvest it\n if line.include? '**Author**'\n script.author = line.gsub(/^.*: /,'').chomp\n elsif line.include? '**Description**'\n script.description = line.gsub(/^.*: /,'').chomp\n elsif line.include? '**Usage**'\n script.usage = line.gsub(/^.*: /,'').chomp\n elsif line.include? '**Type**'\n script.type = line.gsub(/^.*: /,'').chomp\n end\n \n str << \"#{line.gsub(/^ *# /,'')}\"\n \n # Find the Gems used\n elsif line =~ /^ *require /\n gemname = line.gsub(/^ *require /,'').delete(\"'\").delete('\"').gsub('/','-').chomp\n # Don't add it unless it's not there or it's an absolute path\n unless ($gems.include? gemname) or (gemname =~ /^[A-Za-z]\\:/)\n $gems << gemname\n end\n \n str << \"\\n\" if comment \n comment = false\n str << \" #{line}\"\n # Evaluate as a code block if it's code\n else\n str << \"\\n\" if comment \n comment = false\n str << \" #{line}\"\n end\n end\n \n # Add the current script to the collection for indexing\n $scripts << script\n \n # The following outputs a complete documentation for each and every single script that's in the directory. Very useful if you remember to type them out properly, otherwise you get to spend a few hours fixing it. Do it right the first time, trust me.\n \n # Headers and style information\n docs.puts \"<html>\"\n docs.puts \"<link rel='stylesheet' type='text/css' href='style.css' />\"\n docs.puts \"<body>\"\n \n docs.puts \"<a href='index.html'>( << Back to Index )</a>\"\n \n # Insert the string into the docs\n docs.puts GitHub::Markdown.render(str)\n \n docs.puts \"<a href='index.html'>( << Back to Index )</a>\"\n docs.puts \"</body></html>\"\nend",
"def default_value_for_homepage\n positional_match_or_nil(@chunked_source.readme, HOMEPAGE_MATCH, 3) do |str|\n warn(\"Using homepage from README: #{str}\")\n end\n end",
"def document(path)\n begin\n target_path = File.join(root_path, lang.to_s, doc_to_fs_path(path))\n doc_file = open(target_path, \"r\")\n rescue Errno::ENOENT\n raise Documentation::DocumentNotFoundError, \"No such document: #{path.inspect}, lang: #{lang} (filesystem: #{target_path.inspect})\"\n end\n\n body = doc_file.read\n Documentation::Markdown::Preprocessing.preprocess! body\n\n @renderer.reset_for_reuse!\n rendered_source = @redcarpet.render(body)\n\n Documentation::Document.new(rendered_source, @renderer.toc_root)\n end",
"def load_manpages\n all_pages = `apropos .`.split \"\\n\"\n all_pages.each do |line|\n matches = APROPOS_REGEX.match line\n command = matches[1]\n description = matches[2]\n manpage = `man #{command}`\n\n client.index index: INDEX_NAME,\n type: client.info['version']['number'].to_i <= 6 ? :document : '_doc',\n body: {\n command: command,\n description: description,\n manpage: manpage\n }\n end\nend",
"def manifestdoc(files)\n raise _(\"RDOC SUPPORT FOR MANIFEST HAS BEEN REMOVED - See PUP-3638\")\n end",
"def convertHtmlToMarkdown\n root = pathExports\n n = 1\n Pathname.glob(pathExports() + \"**/*.html\").each do |p|\n puts \"File \" + n.to_s + \": \" + p.to_s\n n = n + 1\n infile = p.to_s\n outfile = p.sub_ext(\".md\").to_s\n command = \"pandoc -f html -t markdown -o #{outfile} #{infile}\"\n puts command\n sh(command)\n end\nend",
"def save_index\r\n # if validate_markdown_repo\r\n\r\n # Write full list to json file\r\n open(INDEX_PATH, 'w') do |f|\r\n f.puts JSON.pretty_generate @index\r\n end\r\n\r\n commit_markdown_repo(@today, ar_add=nil, message='index ')\r\n # end\r\n end",
"def default_page\n\t\t\t\tDir.chdir File.join(self.source, @site_name)\n\t\t\t\tFile.open Settings::PAGES_TEMPLATE + '/page.md', 'r' do |file|\n\n\t\t\t\t\tfront_matter = {\n\t\t\t\t\t\t'title' => 'Home Page',\n\t\t\t\t\t\t'date' => Time.now.strftime(\"%Y-%m-%d\"),\n\t\t\t\t\t\t'author' => 'Your Name',\n\t\t\t\t\t\t'template' => 'page'\n\t\t\t\t\t}\n\n\t\t\t\t\tcontents = Liquid::Template.parse(file.read).render front_matter \n\t\t\t\t\tFile.open(File.join(\"pages\", \"index.md\"), \"w\") do |f|\n\t\t\t\t\t\tf.write contents\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tFileUtils.mkdir_p(File.join(self.source, @site_name, \"media/images\", \"index\"))\n\t\t\tend",
"def page_name\n basename = File.basename @relative_name\n basename =~ /\\.(rb|rdoc|txt|md)$/i\n\n $` || basename\n end",
"def first_page\n # TODO are there cases where main_page = 'README' for 'lib/README'?\n if @options.main_page && (main_file = @all_files.find { |f| f.full_name == @options.main_page })\n main_file\n elsif (file = @simple_files.first)\n file\n elsif (cm = @unique_classes_and_modules.find { |k| !k.comment.empty? })\n cm\n elsif (file = @files_with_comment.first)\n file\n elsif !@unique_classes_and_modules.empty?\n @unique_classes_and_modules.find { |k| k.any_content } or\n @unique_classes_and_modules.first\n else\n @all_files.first\n end\n end",
"def index\n if request.format.html?\n api_canon_docs\n else\n super\n end\n end",
"def update_readme(data_hash)\n \n puts \"Updating README measure order...\"\n \n table_flag_start = \"MEASURE_WORKFLOW_START\"\n table_flag_end = \"MEASURE_WORKFLOW_END\"\n \n readme_path = \"README.md\"\n \n # Create table\n table_lines = []\n table_lines << \"|Group|Measure|Dependencies*|\\n\"\n table_lines << \"|:---|:---|:---|\\n\"\n data_hash.each do |group|\n new_group = true\n group[\"group_steps\"].each do |group_step|\n grp = \"\"\n if new_group\n grp = group[\"group_name\"]\n end\n name = group_step['name']\n deps = group_step['dependencies']\n table_lines << \"|#{grp}|#{name}|#{deps}|\\n\"\n new_group = false\n end\n end\n \n # Embed table in README text\n in_lines = IO.readlines(readme_path)\n out_lines = []\n inside_table = false\n in_lines.each do |in_line|\n if in_line.include? table_flag_start\n inside_table = true\n out_lines << in_line\n out_lines << table_lines\n elsif in_line.include? table_flag_end\n inside_table = false\n out_lines << in_line\n elsif not inside_table\n out_lines << in_line\n end\n end\n \n File.write(readme_path, out_lines.join(\"\"))\n \nend",
"def identifier_for_filename fn\n return super unless @hax_mode\n if 'md' == fn # there has to be a better way :(\n no_dot_dot = dot_dot_strip_assert(@hax_last_filename)\n if no_dot_dot == @config[:use_as_main_index] # 'README.md'\n identifier = '/' # overwrite the index.html generated by nandoc!\n else\n # '../README.md' => 'README.md' => '/README/'\n identifier = super(no_dot_dot)\n end\n else\n if fn\n if dot_dot_has?(fn)\n fail(\"fix this -- should never have dot dot name here: #{hn}\")\n end\n identifier = super(fn)\n else\n identifier = dot_dot_strip_assert(@hax_last_dirname)+'/'\n end\n end\n # before we get to resuce orphans we need to make sure we have\n # resolved some file as a site root. First one wins.\n if ! @hax_root_found\n shorter = slash_strip_assert(identifier)\n if @basenames.include?(shorter)\n @hax_root_found = true\n identifier = '/'\n end\n end\n use_identifier = identifier_normalize(identifier)\n use_identifier\n end",
"def index_file(file, pages_dir, stopwords, file_data)\n # Removing the dir from the file name\n # begin\n actual_name = file.gsub(pages_dir, \"\")\n # rescue NoMethodError\n# actual_name = badpage.html\n \n\n # Resetting the file path\n file_path = \"\"\n file_path = File.expand_path(\".\") + \"/\" + file\n\n print \"Parsing HTML document: \" + actual_name + \" \\n\"\n\n # Finding all the tokens in the file\n tokens = find_tokens(file_path)\n\n # Getting the page title, word count, and page url\n page_title = get_title(file_path)\n word_count = tokens.length\n page_url = file_data[actual_name]\n\n # Updating the docindex hash\n $docindex[file.gsub(pages_dir, \"\")] = [word_count, page_title, page_url]\n\n # Removing the stop words and getting the stem words in the file\n tokens = remove_stop_tokens(tokens, stopwords)\n tokens = stem_tokens(tokens)\n\n # Creating the invindex hash table\n for token in tokens\n begin\n if $invindex.member?(token)\n if $invindex[token].member?(actual_name)\n $invindex[token][actual_name] += 1\n else\n $invindex[token][actual_name] = 1\n end\n else\n $invindex[token] = {actual_name => 1}\n end\n # end\n# rescue NoMethodError\n # puts \"NoMethodError\"\n end\n #puts file_name\n # title = nil\n end\n #end\nend",
"def exec_index\n begin\n require 'fastri/version'\n fastri = true\n rescue LoadError\n fastri = false\n end\n if fastri\n if no_harm?\n $stderr.puts \"fastri-server -b\"\n else\n system \"fastri-server -b\"\n end\n else\n case config.installdirs\n when 'std'\n output = \"--ri-system\"\n when 'site'\n output = \"--ri-site\"\n when 'home'\n output = \"--ri\"\n else\n abort \"bad config: sould not be possible -- installdirs = #{config.installdirs}\"\n end\n\n if File.exist?('.document')\n files = File.read('.document').split(\"\\n\")\n files.reject!{ |l| l =~ /^\\s*[#]/ || l !~ /\\S/ }\n files.collect!{ |f| f.strip }\n else\n files = [\"lib\", \"ext\"]\n end\n\n opt = []\n opt << \"-U\"\n opt << output\n opt << files\n opt = opt.flatten\n\n if no_harm?\n puts \"rdoc #{opt.join(' ').strip}\"\n else\n #sh \"rdoc #{opt.join(' ').strip}\"\n require 'rdoc/rdoc'\n ::RDoc::RDoc.new.document(opt)\n end\n end\n end",
"def locate(*args)\n # TODO this will be a security hole, since ../ will likely work\n File.expand_path(File.join(basedir, args[0] + '.md'))\n end",
"def display_toc(cgi)\n res = \"\"\n index = Index.new\n index.each_message_dir do |md|\n res += %{<a href=\"display.cgi?ms=#{md.base_name}\">#{md.date_range}</a> }\n end\n res\nend",
"def html_markup_rdoc(text); end",
"def docs; end",
"def main\n blog = File.read(FILE_PATH).gsub(/-----[\\r\\n|\\n|\\r]EXTENDED BODY:/, '<!-- more -->')\n articles = split_to_articles(blog)\n headers_and_bodies = split_to_headers_and_bodies(articles)\n\n headers_and_bodies.each do |header_and_body|\n header_and_body[:header] = convert_header(header_and_body[:header])\n header_and_body[:body] = ReverseMarkdown.convert header_and_body[:body]\n end\n\n create_md_file(headers_and_bodies)\nend",
"def index\n @urls = ['index', 'fade_replace', 'simple_blind_ror', 'javascript']\n @descriptions = {'index' => \"listing of all demos/methods. This page\",\n \n 'fade_replace' => \"link triggers central page replacement. This will be used in the final project. Uses ror/rjs\",\n 'simple_blind_ror' => \"simplistic scriptaculous effects using ror. demo only.\",\n 'javascript' => \"uses javascript directly. demo only. straight from Scriptaculous site.\"\n }\n end",
"def to_markdown\n files = Dir.glob('**/*.html')\n files.each do |f|\n new_f = f.gsub 'html', 'markdown'\n system \"mv #{f} #{new_f}\" if File.file? f\n end\n end",
"def help(lang='en')\n readme.to_s(lang)\n end",
"def generate_index_templates(resource_docs)\n restapi_config = YAML.load(File.read(\"#{config_dir}/restapi_doc.yml\"))\n resource_docs.each { |resource| resource.parse_apidoc }\n template = IO.read(template_dir('index.html.haml'))\n parsed = Haml::Engine.new(template).render(Object.new, :project_info => restapi_config, :resource_docs => resource_docs)\n File.open(temp_dir(\"index.html\"), 'w') { |file| file.write parsed }\n \n # Generate detail files\n resource_docs.each do | resource_doc|\n generate_resource_detail_file!(resource_doc)\n end\n end",
"def help\n @page = HELP_PAGES.include?(params[:page]) ? params[:page] : 'index'\n contents = File.read(\"#{Rails.root}/app/views/help/#{@page}.markdown\")\n links_filename = \"#{Rails.root}/app/views/help/links/#{@page}_links.markdown\"\n links = File.exists?(links_filename) ? File.read(links_filename) : \"\"\n @help_content = RDiscount.new(contents+links).to_html.gsub MARKDOWN_LINK_REPLACER, '<tt>\\1</tt>'\n @help_pages = HELP_PAGES - ['tour']\n @help_titles = HELP_TITLES\n if !logged_in? || current_account.reviewer?\n render :template => 'home/help', :layout => 'home'\n else\n return self.index\n end\n end",
"def call(directory)\n file = \"#{directory}/README.md\"\n content = File.read(file)\n matched = content.match(PATTERN)\n\n return unless matched\n\n highlight_gem_syntax \\\n file, content, matched[:line], matched[:gem]\n end",
"def download_readme!(opts={})\n download!(:readme, opts)\n end",
"def document_snippet(doc)\n ret = nil\n File.open(\"./test 90/#{doc}\", \"r\") do |f|\n f.seek(12)\n ret = f.read(50)\n end\n return ret\nend",
"def gen_main_index\n if @template.const_defined? :FRAMELESS then\n #\n # If we're using a template without frames, then just redirect\n # to it from index.html.\n #\n # One alternative to this, expanding the main page's template into\n # index.html, is tricky because the relative URLs will be different\n # (since index.html is located in at the site's root,\n # rather than within a files or a classes subdirectory).\n #\n open 'index.html', 'w' do |f|\n f.puts(%{<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"\n \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">})\n f.puts(%{<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\"\n lang=\"en\">})\n f.puts(%{<head>})\n f.puts(%{<title>#{CGI.escapeHTML(@options.title)}</title>})\n f.puts(%{<meta http-equiv=\"refresh\" content=\"0; url=#{@main_url}\" />})\n f.puts(%{</head>})\n f.puts(%{<body></body>})\n f.puts(%{</html>})\n end\n else\n main = RDoc::TemplatePage.new @template::INDEX\n\n open 'index.html', 'w' do |f|\n style_url = style_url '', @options.css\n\n classes = @classes.sort.map { |klass| klass.value_hash }\n\n values = {\n :initial_page => @main_url,\n :style_url => style_url('', @options.css),\n :title => CGI.escapeHTML(@options.title),\n :charset => @options.charset,\n :classes => classes,\n }\n\n values[:inline_source] = @options.inline_source\n\n main.write_html_on f, values\n end\n end\n end",
"def doclocation\n File.expand_path(\"../../uploaddoc/\", __FILE__)\nend"
] | [
"0.70091426",
"0.68860865",
"0.66454583",
"0.6599822",
"0.6556787",
"0.6556787",
"0.6556787",
"0.6525539",
"0.6373534",
"0.6365624",
"0.6364324",
"0.63603204",
"0.63311034",
"0.6317537",
"0.6239274",
"0.61938655",
"0.61938655",
"0.6176773",
"0.61716115",
"0.61703575",
"0.6159396",
"0.6128018",
"0.60838765",
"0.60638636",
"0.60500544",
"0.6043932",
"0.6036321",
"0.6032578",
"0.6027122",
"0.60206664",
"0.6007753",
"0.59859437",
"0.5978813",
"0.59533185",
"0.59447455",
"0.5917628",
"0.5907444",
"0.58846474",
"0.58709383",
"0.5856121",
"0.5845794",
"0.5821299",
"0.5820286",
"0.58102506",
"0.5800027",
"0.57898545",
"0.57851815",
"0.57801986",
"0.5765331",
"0.5764072",
"0.5761688",
"0.57598114",
"0.5759284",
"0.5745819",
"0.572151",
"0.5715447",
"0.5705164",
"0.5705164",
"0.5694933",
"0.56887573",
"0.5682602",
"0.5671546",
"0.56488293",
"0.56446147",
"0.56444454",
"0.5643814",
"0.564279",
"0.56380963",
"0.56374335",
"0.56367606",
"0.5622274",
"0.5617211",
"0.5609872",
"0.56037194",
"0.559422",
"0.557406",
"0.55731064",
"0.5555825",
"0.5550459",
"0.55440927",
"0.55381155",
"0.55157745",
"0.55128807",
"0.55114824",
"0.5506208",
"0.5486386",
"0.5470525",
"0.5465206",
"0.5463434",
"0.5460251",
"0.5445177",
"0.54441375",
"0.5443968",
"0.5442256",
"0.54251444",
"0.5425121",
"0.5424614",
"0.54230976",
"0.54167515",
"0.5415914"
] | 0.7049303 | 0 |
This is a poor man's markdown strip. Convert to HTML, strip tags, and return plain text suitable to act as the content for the search index. | def searchable_content(file)
content = File.read file
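  # Render the raw markdown to HTML first so every piece of markup becomes a strippable tag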
content = CommonMarker.render_html content
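  # Strip all HTML tags with a simple regex and flatten newlines to spaces; String#remove here presumably comes from ActiveSupport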
content.remove(/<\/?[^>]*>/).gsub("\n", " ")
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def plain_text(text)\n strip_tags(markdown.render(text.to_s)).strip\n end",
"def markdown(text)\n sanitize(BlueCloth::new(text).to_html)\n end",
"def markdown(text)\n sanitize BlueCloth::new(text).to_html\n end",
"def process_markdown\n self.data = self.class.convert_markdown(self.data)\n sanitize_html\n end",
"def markdown(txt)\n options = %i[\n hard_wrap filter_styles autolink\n no_intraemphasis fenced_code gh_blockcode\n ]\n doc = Nokogiri::HTML(Redcarpet.new(txt, *options).to_html)\n doc.search('//pre[@lang]').each do |pre|\n pre.replace Albino.colorize(pre.text.rstrip, pre[:lang])\n end\n doc.xpath('//body').to_s.gsub(%r{</?body>}, '').html_safe\n end",
"def process_markdown\n self.explanation_html = Kramdown::Document.new(explanation_md || '').to_html.gsub(/[\\r\\n]+/, '')\n end",
"def strip_html(text)\n Nokogiri::HTML.fragment(text).content\n end",
"def html_markup_markdown(text); end",
"def markdown(text)\n raw sanitize Markdown.new(text).to_html\n end",
"def text\n html.gsub(REGEX_TAGS, \"\")\n end",
"def markdown(text)\n return \"\" unless text\n\n CommonMarker.render_doc(text, :SMART).to_html([:NOBREAKS]).html_safe\n end",
"def as_text\n return self if self.blank?\n mytext = self.gsub(/<p>(.*?)<\\/p>/mi,'\\1'+\"\\n\\n\")\n mytext = mytext.gsub(/<br(.*?)>/mi,\"\\n\") \n mytext = mytext.gsub(/<p(.*?)>/mi,\"\\n\\n\") \n mytext = mytext.gsub(/<\\/p>/mi,\"\") \n mytext = mytext.gsub(/<div(.*?)>/mi, \"\")\n mytext = mytext.gsub(/<\\/div>/mi,\"\") \n # Go ahead and strip all the other html tags as well\n mytext = mytext.gsub(/<\\/?[^>]*>/, \"\")\n CGI.unescapeHTML(mytext).strip\n end",
"def to_markdown(html, _options = {})\n cleaned = html.scrub\n cleaned = pre_sanitize(cleaned)\n cleaned = Sanitize.fragment(cleaned, HTML_SANITIZATION_CONFIG)\n Kramdown::Document.new(cleaned, input: :html).to_kramdown.strip\n rescue\n 'The description could not be imported, the most likely cause of this is that it contained illegal HTML markup'\n end",
"def strip_tags(html); end",
"def strip_tags(html); end",
"def strip_tags(html); end",
"def strip_html_tags!\n @raw.gsub!(/<[^>]+?>/, ' ')\n end",
"def strip_tags(html)\n return html if html.blank?\n if html.index(\"<\")\n text = \"\"\n tokenizer = ::HTML::Tokenizer.new(html)\n while token = tokenizer.next\n node = ::HTML::Node.parse(nil, 0, 0, token, false)\n # result is only the content of any Text nodes\n text << node.to_s if node.class == ::HTML::Text\n end\n # strip any comments, and if they have a newline at the end (ie. line with\n # only a comment) strip that too\n text.gsub(/<!--(.*?)-->[\\n]?/m, \"\")\n else\n html # already plain text\n end\n end",
"def md(text)\n renderer = Redcarpet::Render::HTML.new\n extensions = {filter_html: true}\n redcarpet = Redcarpet::Markdown.new(renderer, extensions)\n redcarpet.render(text).html_safe\n end",
"def to_md\n # Usually ruby is extremely readable, but I think \"-1\" means \"give me all the \n # trailing blank lines\" is surprisingly opaque. That's what the -1 does...\n lines = @text.split(\"\\n\", -1)\n lines.collect do |line|\n result = line\n \n # Leave lines that start with 4 spaces alone. These are code blocks and\n # should pass through unchanged.\n unless result =~ /^\\s{4,}/\n \n # Convert headers\n result.sub!(/^(=){1,6}/) { |s| \"#\" * s.length} unless result =~ /^={7,}/\n \n # Convert strong to have two stars\n # The matching pair of stars should start with a single star that is either at\n # the beginning of the line or not following a backslash, have at least one\n # non-star and non-backslash in between, then end in one star\n result.gsub!(/(\\A|[^\\\\\\*])\\*([^\\*]*[^\\*\\\\])\\*/, '\\1**\\2**')\n\n # Convert inline code spans to use backticks\n result.gsub!(/(\\A|[^\\\\])\\+([^\\+]+)\\+/, '\\1`\\2`')\n\n # Convert bare http:, mailto: and ftp: links\n result.gsub!(/(\\A|\\s)(http:|https:|mailto:|ftp:)(\\S*)/, '\\1[\\2\\3](\\2\\3)')\n\n # Convert bare www to an http: link\n result.gsub!(/(\\A|\\s)www\\.(\\S*)/, '\\1[www.\\2](http://www.\\2)')\n\n # Convert link: links to refer to local files\n result.gsub!(/(\\A|\\s)link:(\\S*)/, '\\1[\\2](\\2)')\n\n # Convert multi word labels surrounded by {} with a url\n result.gsub!(/\\{([^\\}]*)\\}\\[(\\S*)\\]/, '[\\1](\\2)')\n\n # Convert one word labels with a url\n result.gsub!(/(\\A|\\s)([^\\{\\s]\\S*)\\[(\\S*)\\]/, '\\1[\\2](\\3)')\n\n end\n \n result\n end.join(\"\\n\")\n end",
"def to_prawn\n MATCHERS.inject(@text) do |final_string, (markdown_matcher, prawn_tag)|\n final_string.gsub(markdown_matcher, prawn_tag)\n end\n end",
"def stripped_content\n\t\treturn self.content.gsub( /<.*?>/, '' ).tr( \"\\n\\t \", ' ' ).strip\n\tend",
"def convert_markdown\n self.html_content = Raptor::Markdown.render(self.content)\n end",
"def markdown_to_html\n html = RDiscount.new(File.read file).to_html\n html = syntax_highlight(html) if config.highlighter\n html = prepend_h1(html) if config.autoh1\n html\n end",
"def lstrip_html\n return if self.blank?\n\n m = self.match(/\\A(\\s*?[^<]|(.*?)>\\s*[^<])/) #Find first printing character\n return self unless m\n \n ldr = m[0]\n ldr_last = ldr.slice(ldr.size-1, ldr.size)\n ldr = ldr.slice(0,ldr.size-1) # portion up to the first printing character\n bdy = ldr_last + m.post_match # portion following the first printing character\n \n cln_ldr = ldr.gsub(/<p/mi, \"<span\")\n cln_ldr = cln_ldr.gsub(/<\\/p/mi, \"</span\")\n cln_ldr = cln_ldr.gsub(/<br(.*?)>/mi, \"\")\n \n m = bdy.match(/(\\A.*?)<p/mi)\n if !m\n bdy = bdy.sub(/<\\/p/mi, \"</span\") # change first closing </p> from an open <p> remaining from ldr\n else\n l = m.post_match\n f_cln = m[0].gsub(/<\\/p/mi, \"</span\") # change any closing </p> from and open <p> remaining from ldr\n bdy = f_cln + l \n end\n return cln_ldr + bdy \n end",
"def html\n Markdown.render(self.content)\n end",
"def html\n Markdown.render(self.content)\n end",
"def markdown(text)\n return if text.blank?\n\n extensions = {\n superscript: true,\n disable_indented_code_blocks: true,\n fenced_code_blocks: true\n }\n render_options = {\n filter_html: true,\n no_images: true,\n no_styles: true,\n safe_links_only: true,\n space_after_headers: true\n }\n\n renderer = Redcarpet::Render::HTML.new(render_options)\n m = Redcarpet::Markdown.new(renderer, extensions)\n sanitize(m.render(text))\n end",
"def clean_up_text\n text.gsub!(/<br/, \"\\n<br\")\n text.gsub!(/<p/, \"\\n<p\")\n text.gsub!(/<\\/?span(.*?)?>/, '')\n text.gsub!(/<\\/?div(.*?)?>/, '')\n end",
"def call\n # mode = (context[:gfm] != false) ? :gfm : :markdown\n # html = GitHub::Markdown.to_html(@text, mode)\n # html.rstrip!\n # html\n\n options = context[:redcarpet_options] || {}\n renderer = HtmlWithRouge.new(\n filter_html: options.fetch(:filter_html, false),\n hard_wrap: options.fetch(:hard_wrap, true)\n )\n options.reject! { |k, v| [:filter_html, :hard_wrap].include?(k) }\n markdown = Redcarpet::Markdown.new(renderer, options)\n html = markdown.render(@text)\n html.rstrip!\n html\n end",
"def contents()\n html = Nokogiri::HTML(@markdown_document.to_html())\n\n # Fixup refs to other markdown documents\n html.css(\"a\").each do |anchor|\n anchor[\"href\"] = anchor[\"href\"].sub(%r{\\.md$}, \".html\")\n end\n\n # Since we transform device-specific $device/README.md pages into\n # discrete $device.html, we need to fixup cross-linking into its namespace\n # This could be generalized some more, to be fixed once we have other internal links to mismatched README.md/index.html locations.\n if File.dirname(relative_output) == \"devices\"\n html.css(\"a\").each do |anchor|\n if anchor[\"href\"].match(%r{\\.\\./[^\\.]+$})\n anchor[\"href\"] = anchor[\"href\"].sub(%r{\\.\\./}, \"devices/\") + \".html\"\n end\n end\n end\n\n # Since Nokogiri produces a complete document from our fragment, we\n # have to pick only what's in the body; so strip the body added tags and higher-up tags.\n html\n .at_css(\"body\").to_s()\n .sub(%r{^<body>}, \"\").sub(%r{</body>$}, \"\")\n end",
"def format(text)\n sanitize(markdown(text))\n end",
"def parse_markdown\n self.bio_html = markdown.render(bio_markdown)\n end",
"def strip_html(text)\n @name =\n # Remove HTML from the text\n Sanitize.clean(text).\n # Replace newlines with a space\n gsub(/\\n|\\r/, ' ').\n # Replaces runs of spaces by a single space\n squeeze(' ').\n # Remove leading and trailing whitespace\n strip\nend",
"def markdown(text)\n return '' if text.nil? || text.empty?\n Wink::Markdown.new(text, :smart).to_html\n end",
"def markdown(text)\n return '' if text.nil? || text.empty?\n Wink::Markdown.new(text, :smart).to_html\n end",
"def markdown(text)\n# coderayified = CodeRayify.new(:filter_html => true,:hard_wrap => true)\n coderayified = PygmentsHTML.new(\n with_toc_data: true,\n hard_wrap: true\n )\n# coderayified = Redcarpet::Render::HTML.new(:filter_html => true,:hard_wrap => true)\n options = {\n :no_intra_emphasis => true,\n :tables => true,\n :fenced_code_blocks => true,\n :autolink => true,\n :strikethrough => true,\n :lax_spacing => true,\n :superscript => true\n }\n markdown_to_html = Redcarpet::Markdown.new(coderayified, options)\n markdown_to_html.render(text).html_safe\n end",
"def convert_markdown\n # self.content_html = Kramdown::Document.new(content).to_html\n self.content_html = markdown(content)\n end",
"def strip_html_tags(text)\n return text.gsub!(/(<[^>]*>)|\\n|\\t/s) {\" \"}\n end",
"def markdown(text)\n BlueCloth::new(text).to_html\n end",
"def markdown\n @markdown ||= process_markdown!\n end",
"def markdown content\n require \"kramdown\"\n\n content = content.\n gsub(/^``` *(\\w+)/) { \"{:lang=\\\"#$1\\\"}\\n~~~\" }.\n gsub(/^```/, '~~~')\n\n Kramdown::Document.new(content, KRAMDOWN_CONFIG).to_html\n end",
"def markdown content\n require \"kramdown\"\n\n content = content.\n gsub(/^``` *(\\w+)/) { \"{:lang=\\\"#$1\\\"}\\n~~~\" }.\n gsub(/^```/, '~~~')\n\n Kramdown::Document.new(content, KRAMDOWN_CONFIG).to_html\n end",
"def markdown(text)\n BlueCloth.new(text).to_html\n end",
"def markdown(text)\n\t\tBlueCloth::new(text).to_html\n\tend",
"def markdownify(input); end",
"def markdown(doc)\n doc = Nokogiri::HTML(doc)\n if doc.at(\"body\").nil?\n doc = \"\"\n else\n doc = ReverseMarkdown.convert(doc.at(\"body\").inner_html)\n end\n end",
"def to_markdown\n lines = @bare_content.split(\"\\n\")\n markdown = \"\"\n\n # Using `while` here so we can alter the collection at will\n while current_line = lines.shift\n # If we got us some of them bird tracks...\n if current_line =~ BIRD_TRACKS_REGEX\n # Remove the bird tracks from this line\n current_line = remove_bird_tracks(current_line)\n # Grab the remaining code block\n current_line << slurp_remaining_bird_tracks(lines)\n\n # Fence it and add it to the output\n markdown << \"```haskell\\n#{current_line}\\n```\\n\"\n else\n # No tracks? Just stick it back in the pile.\n markdown << current_line + \"\\n\"\n end\n end\n\n markdown\n end",
"def preprocessMarkdownForHTML(markdown)\n output = \"\"\n inInstructions = false\n \n markdown.split(\"\\n\").each do |line|\n # parse an instructions list\n # use a dummy HTML tag so our final regex doesn't get stuck in an infinite loop replacing itself\n instructionsMatch = line.match(/^>>\\s*(.*?)$/)\n if instructionsMatch\n if not inInstructions\n output += \"<instructions>\\n\"\n end\n output += instructionsMatch[1] + \"\\n\"\n inInstructions = true\n next # don't try to parse anything else\n elsif inInstructions\n output += \"</instructions>\\n\"\n inInstructions = false\n end\n\n # parse headers and page IDs\n headerMatch = line.match(/^(#+)\\s+(.*?)\\s+@@(.*?)$/)\n if headerMatch\n headerLevel = headerMatch[1].length.to_s\n headerTitle = headerMatch[2]\n headerID = headerMatch[3]\n node = nodeWithID(headerID, $doc.toc.rootNode)\n if not node\n puts \"ERROR: Couldn't find node with ID #{headerID}\"\n exit 1\n end\n output += \"<h#{headerLevel}><a name=\\\"#{headerID}\\\">#{node.levelPrefix} #{headerTitle}</a></h#{headerLevel}>\\n\"\n next\n end\n \n # parse links to page IDs and replace with links to the real .htm file\n while 1\n linkMatch = line.match(/\\[.*?\\]\\((@@(.*?))\\)/)\n if linkMatch\n linkID = linkMatch[2]\n linkValue = linkToPageIDFrom(linkID, \"_PAGE_\") # use dummy value\n if not linkValue\n puts \"ERROR: Invalid link ID \\\"#{linkID}\\\"\"\n exit 1\n end\n line[linkMatch[1]] = linkValue\n else\n break\n end\n end\n \n # parse image and label combo\n imgLabelMatch = line.match(/!!\\[(.*?)\\]\\((.*?)\\)/)\n if imgLabelMatch\n label = imgLabelMatch[1]\n imgPath = imgLabelMatch[2]\n \n # read the image and width height to force the size on images for better loading\n # when viewing the files in the boot DVD. there are some issues where anchor jump\n # links don't always jump to the right place on the boot DVD and apparently forcing\n # the image sizes allows the pages to jump properly.\n \t\timgWidth = 0\n \t\timgHeight = 1\n \t\tbegin\n \t\t data = nil\n \t\t if (imgPath =~ /.png$/)\n \t\t data = IO.read($pagesDir + \"/\" + imgPath, 8, 16).unpack('NN')\n\t\t else\n\t\t puts \"ERROR: Unsupported image type: #{imgPath}\"\n\t\t exit 1\n\t end\n \t\t if (data)\n \t\t imgWidth = data[0]\n \t\t imgHeight = data[1]\n\t\t end\n\t\t rescue\n\t end\n imgWidthHeightAttrs = \"\"\n if imgWidth != 0 and imgHeight != 0\n imgWidthHeightAttrs = \" width=\\\"#{imgWidth}\\\" height=\\\"#{imgHeight}\\\"\"\n end\n\n output += \"<p class=\\\"imageAndLabel\\\"><img src=\\\"#{imgPath}\\\" alt=\\\"\" + CGI::escapeHTML(label) + \"\\\"#{imgWidthHeightAttrs}/><br/><em>\" + CGI::escapeHTML(label) + \"</em></p>\\n\"\n next\n end\n \n # parse warning paragraphs\n warningMatch = line.match(/^!!\\s+(.*?)$/)\n if warningMatch\n output += \"<warning>\\n\" + warningMatch[1] + \"\\n<\\/warning>\\n\"\n next\n end\n \n output += line + \"\\n\"\n end\n \n # close off an open instructions div\n if inInstructions\n output += \"</instructions>\\n\"\n end\n \n # Markdown doesn't allow processing of markup within block-level tags such as <div>, so we have to manually process the markup.\n # We call preprocessMarkdownForHTML() to properly process our custom markup within these custom block elements.\n # An extra newline is added to force a paragraph\n while 1\n instructionsMatch = output.match(/(<instructions>)(.*?)(<\\/instructions>)/m)\n if instructionsMatch\n output[instructionsMatch[1]] = \"<div class=\\\"instructions\\\">\"\n output[instructionsMatch[2]] = 
markdownToHTML(preprocessMarkdownForHTML(\"\\n\"+instructionsMatch[2]))\n output[instructionsMatch[3]] = \"</div>\"\n else\n break\n end\n end\n \n while 1\n warningMatch = output.match(/(<warning>)\\s*(.*?)(<\\/warning>)/m)\n if warningMatch\n output[warningMatch[1]] = \"<div class=\\\"warning\\\"><div class=\\\"warningBody\\\"><div class=\\\"warningImg\\\"></div><div class=\\\"warningContent\\\">\"\n output[warningMatch[2]] = markdownToHTML(preprocessMarkdownForHTML(\"\\n\"+warningMatch[2]))\n output[warningMatch[3]] = \"</div></div></div>\"\n else\n break\n end\n end\n \n return output\nend",
"def markdown( text )\n Kramdown::Document.new( text ).to_html\n end",
"def normalize_text(content)\n replaced_content = content.gsub(/\\n|<br>| /) do |match|\n case match\n when \"\\n\", \" \"\n \"\"\n when \"<br>\"\n \"\\n\"\n end\n end.sub(/\\s*(---|‐‐‐|―――)\\s*\\z/, \"\")\n strip_html(replaced_content)\n end",
"def safe_text text\n return \"\" if text.nil?\n \n markdown_content_type = \"# Content-Type: text/markdown\"\n starts_with_markdown = text.strip.start_with? markdown_content_type\n if (not /<(a |img |ol|ul|li|h[1-6]|p|div|span)[^<]*>/.match(text)) && !starts_with_markdown\n return \"<blockquote>\" + CGI::escape_html(text).gsub(\"\\n\",\"<br />\\n\") + \"</blockquote>\"\n end\n\n if BMF::Settings.instance.display_sanitized_html != 'yes'\n return \"<blockquote>\" + CGI::escape_html(text).gsub(\"\\n\", \"<br />\\n\") + \"</blockqoute>\"\n end\n\n if text.strip.start_with? markdown_content_type\n text = RDiscount.new(text.sub(markdown_content_type, \"\")).to_html\n end\n\n safe_html(text)\n \n end",
"def process_full_text(text)\n frag = Nokogiri::HTML::DocumentFragment.parse text.to_html\n clean_text = Nokogiri::HTML::DocumentFragment.parse \"\"\n\n frag.traverse do |node|\n # skip empty <br> elements\n next if node.nil? || node.name == \"br\"\n\n # Construct a new <p> with extracted text\n if node.text?\n new_p = Nokogiri::XML::Node.new(\"p\", clean_text)\n new_p.content = node.text.strip\n clean_text << new_p\n end\n end\n clean_text.to_html\n end",
"def markdown(text)\n BlueCloth::new(text).to_html.html_safe\n end",
"def strip_html\n gsub(HTML_TAG_PATTERN, \"\")\n end",
"def plain_text\n text ? text.gsub(/<[^>]+>/,' ').squeeze(' ').strip : nil\n end",
"def clean_markdown(md)\n lines = md.split(\"\\n\")\n cleaned = []\n last_stripped = ''\n\n lines.each do |itm|\n stripped = itm.strip\n\n if stripped.start_with?('* ')\n # make sure there is a blank line before the first line of a UL\n if last_stripped != \"\\n\" && !last_stripped.start_with?('* ')\n cleaned << ''\n end\n\n elsif stripped.start_with?('1. ')\n # make sure there is a blank line before the first line of a OL\n if last_stripped != \"\\n\" && !last_stripped.start_with?('1. ')\n cleaned << ''\n end\n\n elsif stripped.start_with?('>')\n # make sure there is a blank line before the first line of a block quote\n if last_stripped != \"\\n\" && !last_stripped.start_with?('>')\n cleaned << ''\n end\n end\n\n cleaned << itm\n last_stripped = stripped\n end\n\n cleaned.join(\"\\n\")\n end",
"def markdown(text)\n Markdown.new(text, :hard_wrap, :autolink).to_html.html_safe\n end",
"def substitute_markdown_inside_raw_html\n each_element(:raw_html) do |e|\n html = e.parsed_html\n next unless html\n\n html.process_markdown_inside_elements(self)\n end\n end",
"def format(text)\n sanitize(markdown(text))\n end",
"def extract_content(post_content)\n doc = Hpricot(post_content)\n html = ''\n if (doc % :blockquote)\n parts = (doc % :blockquote).children\n quoting = false\n bcount = 0\n\n if parts.size > 2\n parts[2..-1].each do |child|\n br = child.to_s == '<br />'\n bcount = br ? bcount + 1 : 0\n if bcount > 2\n quoting = !quoting\n html += quoting ? \"<p><blockquote>\" : \"</blockquote></p>\"\n bcount = 0\n else\n html += child.to_s\n end\n end\n\n html += '</blockquote></p>' if quoting\n html = html.gsub('<br /><br /><blockquote>', '<blockquote>')\n html = html.gsub('<br /><br /></p>', '</p>')\n html = html.gsub('<p><br />', '<p>')\n end\n end\n end",
"def markdown_to_html(src)\n render_options = {\n prettify: true,\n }\n renderer = MarkdownRenderer.new(render_options)\n extensions = {\n no_intra_emphasis: true,\n autolink: true,\n tables: true,\n fenced_code_blocks: true,\n strikethrough: true,\n underline: true,\n quote: true,\n footnotes: true,\n }\n md = ::Redcarpet::Markdown.new(renderer, extensions)\n html = md.render(src)\n return html\n end",
"def liquidize_markdown(content, arguments)\n # doc = BlueCloth.new(Liquid::Template.parse(content).render(arguments, :filters => [LiquidFilters], \n # :registers => {:controller => controller, :view => self, :account_site_assets => account_site_assets, :current_user => current_user}))\n doc = ::Kramdown::Document.new(Liquid::Template.parse(content).render(arguments, :filters => [LiquidFilters], \n :registers => {:controller => controller, :view => self, :account_site_assets => account_site_assets, :current_user => current_user}),\n :parse_block_html => true)\n return doc.to_html.html_safe\n end",
"def htmlClean(html)\n html\nend",
"def markdown(text)\n BlueCloth::new(text).to_html\nend",
"def content\n MarkdownService.new.render(body).html_safe \n end",
"def markdown_to_html(src)\n render_options = {\n prettify: true,\n }\n renderer = MdRenderer.new(render_options)\n extensions = {\n no_intra_emphasis: true,\n autolink: true,\n tables: true,\n fenced_code_blocks: true,\n strikethrough: true,\n underline: true,\n quote: true,\n footnotes: true,\n }\n md = ::Redcarpet::Markdown.new(renderer, extensions)\n html = md.render(src)\n return html\n end",
"def normalise_html(html)\n Nokogiri::HTML5.fragment(html).to_s.gsub(\"\\n\", \"\")\n end",
"def remove_paragraph_tags mytext\n mytext.sub!(/^<p>\\s*<\\/p>/,\"\")\n mytext.sub!(/(<br>)*<p>\\s*<\\/p>$/,\"\")\n mytext.sub!(/^<p>/,'')\n mytext.sub!(/<\\/p>?/,'')\n return mytext\n end",
"def markdown_to_html(content)\n render_options = {hard_wrap: true, filter_html: true, safe_links_only: true}\n markdown_options = {no_intraemphasis: true, autolink: true, superscript: true, fenced_code_blocks: true }\n markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new(render_options),markdown_options)\n return markdown.render(content).html_safe\n end",
"def tomdoc\n lines = raw.split(\"\\n\")\n\n # remove remark symbol\n if lines.all?{ |line| /^\\s*#/ =~ line }\n lines = lines.map do |line|\n line =~ /^(\\s*#)/ ? line.sub($1, '') : nil\n end\n end\n\n # for some reason the first line is coming in without indention\n # regardless, so we temporary remove it\n first = lines.shift\n\n lines = deindent(lines)\n\n # put first line back\n unless first.nil?\n lines.unshift(first.sub(/^\\s*/,''))\n end\n\n lines.compact.join(\"\\n\")\n end",
"def markdown(md_text, options={})\n config = sanitize_config(options[:trusted?] || false)\n filtered = Filters.apply_all_to(md_text)\n Sanitize.clean(RDiscount.new(filtered, :smart).to_html, config).html_safe\n end",
"def markdown_to_html_safe(markdown_text)\n result = '<div class=\"markdown\">' + RDiscount.new(markdown_text).to_html + '</div>'\n result.html_safe\n end",
"def text_wikimedia_html page\n html = @client.text_wikimedia_html page\n # normalize html by removing <!-- html comments -->\n doc = Nokogiri.HTML html\n (doc.xpath '//comment()').remove\n doc.inner_html\n end",
"def content\n lines = super.lines.to_a\n fixed = []\n current_line = 0\n offset = 0\n formatted_lines = markup.lines.to_a\n lines.each_with_index do |line, index|\n formatted_line = formatted_lines[index + offset]\n if line.strip == \"\" and (formatted_line and formatted_lines[index + offset].strip != \"\")\n offset -= 1\n else\n fixed << line\n end\n end\n lines = fixed.join(\"\")\n lines\n end",
"def markdown(text)\n render_options = {\n filter_html: true,\n hard_wrap: true,\n link_attributes: { rel: 'nofollow' }\n # no_images: true\n }\n renderer = Redcarpet::Render::HTML.new(render_options)\n\n extensions = {\n autolink: true,\n fenced_code_blocks: true,\n lax_spacing: true,\n no_intra_emphasis: true,\n }\n Redcarpet::Markdown.new(renderer, extensions).render(text).html_safe\n end",
"def markdown_to_text(content,user)\n filenumber = user.id\n filename = filenumber.to_s+\".txt\"\n %x[\"touch #{filename}\"]\n File.atomic_write(\"tmp/#{filename}\") do |file|\n file.write(content)\n end\n text_content = `pandoc -f markdown -t text \"tmp/#{filename}\"`\n File.delete(\"tmp/#{filename}\")\n return text_content\n end",
"def text_only(html)\n Nokogiri::HTML.parse(html).text.gsub(/\\A\\p{Space}+|\\p{Space}+\\z/, '')\n .strip\n end",
"def strip_tags(html)\n Sanitize.clean(html.strip).strip\n end",
"def strip_tags(html)\n Sanitize.clean(html.strip).strip\n end",
"def clean_article_body\n get_source_selectors.each do |selector|\n if @page.search(selector).present?\n @page = page.search(selector)\n break\n end\n end\n # Strip unwanted spaces and newlines.\n @page.collect {|elt| elt.content.strip.gsub(/\\n|\\r/, '').gsub(/\\ +/, ' ')}.join(' ')\n end",
"def markdown\n markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML)\n end",
"def conv_markup(txt)\n txt.\n gsub(%r{<tt>(.*?)</tt>}) { \"+#$1+\" } .\n gsub(%r{<code>(.*?)</code>}) { \"+#$1+\" } .\n gsub(%r{<b>(.*?)</b>}) { \"*#$1*\" } .\n gsub(%r{<em>(.*?)</em>}) { \"_#$1_\" }\n end",
"def markdown(text)\n htmlcoderay = HTMLWithCoderay.new(filter_html: true,\n hard_wrap: true)\n options = {\n fenced_code_blocks: true,\n no_intra_emphasis: true,\n autolink: true,\n lax_html_blocks: true,\n tables: true,\n strikethrough: true,\n superscript: true,\n underline: true,\n highlight: true,\n footnotes: true,\n with_toc_data: true\n }\n\n markdown_to_html = Redcarpet::Markdown.new(htmlcoderay, options)\n begin\n markdown_to_html.render(text).html_safe\n rescue ArgumentError => e\n \"<h1>Error in Markdown: #{e.message}</h1><p>#{@blog.body}</p>\".html_safe\n end\n end",
"def markdown(text) # Define method markdown with redcarpet gem\n\t\tRedcarpet::Markdown.new(Redcarpet::Render::HTML).render(text)\n end",
"def to_html_fragment\n to_markdown_slices.map do |tagged_markdown|\n tagged_markdown.to_html\n end.join(\"\\n\\n\")\n end",
"def html_to_markdown(content,user)\n filenumber = user.id\n filename = filenumber.to_s+\".html\"\n %x[\"touch #{filename}\"]\n File.atomic_write(\"tmp/#{filename}\") do |file|\n file.write(content)\n end\n html_content = `pandoc -f html -t markdown \"tmp/#{filename}\"`\n File.delete(\"tmp/#{filename}\")\n return html_content\n end",
"def text_for_post(post)\n post.render(site.layouts, site.site_payload)\n\n doc = Nokogiri::HTML(post.output)\n doc.css('script').remove().css('#doc_nav').remove()\n\n # one line it\n doc.text.gsub(/[\\r\\n\\s]+/,\" \")\n end",
"def parse_markdown\n self.body_html = markdown.render(body_markdown.to_s)\n self.summary_html = markdown.render(summary_markdown.to_s)\n end",
"def md_to_html\n options = {\n filter_html: true,\n link_attributes: {\n rel: \"nofollow\",\n target: \"_blank\"\n }\n }\n\n extensions = {\n space_after_headers: true,\n autolink: true\n }\n\n renderer = Redcarpet::Render::HTML.new(options)\n markdown = Redcarpet::Markdown.new(renderer, extensions)\n\n self.description = markdown.render(self.description_md)\n end",
"def markdown(text)\n renderer = HTMLwithPygments.new(:hard_wrap => true)\n options = {\n :fenced_code_blocks => true,\n :no_intra_emphasis => true,\n :autolink => true,\n :strikethrough => true,\n :lax_html_blocks => true,\n :superscript => true,\n :space_after_headers => true\n }\n Redcarpet::Markdown.new(renderer, options).render(text).html_safe\n end",
"def text\n return @text if (defined?(@text) && !@text.nil?)\n content = Readability::Document.new(@html).content #segfaults be damned\n\n if content.nil? || content.empty?\n #this is reaalll dirty...but it mostly works\n @text = encode_utf8_with_extreme_prejudice(@html).\n gsub(/\\&\\w+\\;|\\<\\S+|\\s\\>|\\\"|\\\\n|\\/|\\\\r|\\u0093|\\u0094|\\n|\\r|\\t/, \" \"). #remove HTML tags and wierd Unicode charecters\n scan(/([\\w+\\s+\\:\\-\\(\\)\\?\\.\\,\\\"\\'\\/\\`\\$\\u2013\\u2019\\u201C\\u201D\\!\\\\xC2\\\\xA0]{300,})/).join. #scan for blocks of text with punctuation 300+ chars\n gsub(/\\xC2\\xA0/, \" \").gsub(/\\?/, \"? \").gsub(/\\s\\?/, \"?\").gsub(/\\!/, \"! \").gsub(/\\./, \". \"). #fix broken punctuation\n gsub(/\\:/, \": \").gsub(/[A-Z]\\w/, ' \\0').gsub(/\\s{2,}/, \" \").gsub(/[A-Za-z0-9]{30,}/,\"\") #fix even more punctuation, remove extraneous data\n else\n #even the Readability text has HTML in it. Remove it.\n @text = (Nokogiri::HTML(content).text).gsub(/\\n|\\t|\\r/,\"\").gsub(/\\?/, \"? \").gsub(/\\s\\?/, \"?\").\n gsub(/\\!/, \"! \").gsub(/\\:/, \": \").gsub(/\\s{2,}/, \" \")\n end\n\n filter_param = (self.params[:q] || self.params[:query] || self.params[:search])\n\n if filter_param\n @text = @text.split.map{|x| x.split(/(#{filter_param})/i).each_slice(2).map(&:join)}.flatten.join(\" \")\n end\n\n return @text\n end",
"def markdown(text)\n options = [:filter_html, :hard_wrap, :autolink, :no_intraemphasis, :gh_blockcode, :fenced_code]\n syntax_highlighter(Redcarpet.new(text, *options).to_html).html_safe\n end",
"def clean text\n text.gsub(/(\\n|\\t|\\r)/, ' ').gsub(/>\\s*</, '><').squeeze(' ')\n end",
"def markdown(text)\n Redcarpet::Markdown.new(\n Redcarpet::Render::HTML,\n autolink: true,\n tables: true\n ).render(text).html_safe # rubocop:disable Rails/OutputSafety\n end",
"def markdown(text)\n coderayified = CodeRayify.new(filter_html: true, hard_wrap: true)\n options = {\n link_attributes: { rel: 'nofollow', target: '_blank' },\n space_after_headers: true,\n fenced_code_blocks: true,\n no_intra_emphasis: true,\n autolink: true,\n strikethrough: true,\n lax_html_blocks: true,\n superscript: true\n }\n markdown = Redcarpet::Markdown.new(coderayified, options)\n markdown.render(text).html_safe\n end",
"def markdown(text)\n coderayified = CodeRayify.new(:filter_html => true, \n :hard_wrap => true,\n :coderay_default_lang => 'ruby'\n )\n options = {\n :fenced_code_blocks => true,\n :no_intra_emphasis => true,\n :autolink => true,\n }\n markdown_to_html = Redcarpet::Markdown.new(coderayified, options)\n markdown_to_html.render(text).html_safe\n end",
"def render_markdown\n return if self.markdown.nil?\n\n render_as = Redcarpet::Render::HTML\n engine = Redcarpet::Markdown.new(render_as, :autolink => true,\n :space_after_headers => true)\n self.content = engine.render(self.markdown)\n end",
"def call\n result = self.class.markdown_renderer.result(schema: schema)\n if @html\n result = self.class.html_renderer.result(body: self.class.redcarpet.render(result))\n result.gsub(/id=\"(.+)\"/) {|text| text.tr(\"/:\", \"\") }\n else\n result\n end\n end",
"def clean_content\n self.content = content.gsub('<p> </p>', '') if content\n end",
"def simple_markdown(text)\n simple_format markdown text\n end"
] | [
"0.74352145",
"0.68507135",
"0.68481845",
"0.6817936",
"0.677911",
"0.67780447",
"0.6670449",
"0.6661747",
"0.6655667",
"0.6638147",
"0.66181946",
"0.6597262",
"0.6528594",
"0.6495768",
"0.6495768",
"0.6495768",
"0.6469273",
"0.64557076",
"0.644955",
"0.6448245",
"0.64395905",
"0.6426557",
"0.6394009",
"0.6339573",
"0.633895",
"0.6332839",
"0.6332839",
"0.6318797",
"0.6313124",
"0.6303571",
"0.62951124",
"0.6288239",
"0.62829053",
"0.6281327",
"0.6280078",
"0.6280078",
"0.6268098",
"0.6258492",
"0.62529755",
"0.6226473",
"0.62261957",
"0.62239915",
"0.62239915",
"0.62180614",
"0.6208833",
"0.62068886",
"0.6205735",
"0.61989045",
"0.6198867",
"0.6195193",
"0.618968",
"0.6180827",
"0.6179323",
"0.6176162",
"0.6172624",
"0.6167443",
"0.61552864",
"0.6149267",
"0.61415744",
"0.61385894",
"0.6122996",
"0.6110394",
"0.61087",
"0.6107314",
"0.6099485",
"0.6095854",
"0.60735464",
"0.6073543",
"0.606487",
"0.60637414",
"0.60513765",
"0.60395634",
"0.6025911",
"0.6020109",
"0.60107297",
"0.6005407",
"0.60027313",
"0.6001764",
"0.599865",
"0.599865",
"0.59972674",
"0.5994922",
"0.5989752",
"0.59795386",
"0.59785134",
"0.59748644",
"0.5970776",
"0.5969571",
"0.5966786",
"0.5965296",
"0.596014",
"0.5957385",
"0.59526324",
"0.59525245",
"0.59522814",
"0.594927",
"0.5948646",
"0.59446084",
"0.5944563",
"0.59398717",
"0.59394866"
] | 0.0 | -1 |
must follow any before filters | def parse_values_from_ui
unless self.period_value.blank? and self.period_units.blank?
p = self.period_value.to_i
self.period = p.send(self.period_units).to_i
end
unless self.breach_duration_value.blank? and self.breach_duration_units.blank?
p = self.breach_duration_value.to_i
self.breach_duration = p.send(self.breach_duration_units).to_i
end
unless self.lower_breach_scale_increment_action.blank? and self.lower_breach_scale_increment_value.blank? and self.lower_breach_scale_increment_units.blank?
self.lower_breach_scale_increment = ''
self.lower_breach_scale_increment = '-' if self.lower_breach_scale_increment_action == 'decrease'
self.lower_breach_scale_increment += self.lower_breach_scale_increment_value.to_s
self.lower_breach_scale_increment += '%' if self.lower_breach_scale_increment_units == '%'
end
unless self.upper_breach_scale_increment_action.blank? and self.upper_breach_scale_increment_value.blank? and self.upper_breach_scale_increment_units.blank?
self.upper_breach_scale_increment = ''
self.upper_breach_scale_increment = '-' if self.upper_breach_scale_increment_action == 'decrease'
self.upper_breach_scale_increment += self.upper_breach_scale_increment_value.to_s
self.upper_breach_scale_increment += '%' if self.upper_breach_scale_increment_units == '%'
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def before(&b)\n filter :before, &b\n end",
"def before filter\n @station.before filter\n end",
"def before(*args, &block)\n add_filter :before, &(args.empty? ? block : construct_filter(*args, &block))\n end",
"def before; end",
"def before &block\n @before_blocks ||= []\n @before_blocks << block if block\n end",
"def before\n all? {|transition| transition.before}\n end",
"def before?\n !before.nil?\n end",
"def before\n\t\t\ttrue\n\t\tend",
"def before *a, &b; valid_in_context Event; define_hook :before, *a, &b; end",
"def before() ; end",
"def before(&proc)\n @before = proc if proc\n @before\n end",
"def before(*hooks, &block)\n before_hooks.unshift block if block\n hooks.each { |h| before_hooks.unshift h }\n end",
"def pre_hooks\n @to_perform.map do |hook|\n next unless hook.type.eql? :pre\n hook\n end.compact\n end",
"def before\n end",
"def before\n @before\n end",
"def before(&block)\n @before << block\n end",
"def before(handler = nil, *guards, &block)\n client.register_filter :before, handler, *guards, &block\n end",
"def valid_before; end",
"def before(options = {})\n @before_actions ||= {}\n @before_actions[@watched_sources.last] = { actions: [], conditions: [] }\n\n @before_actions[@watched_sources.last][:actions] = (options[:do].present? ? [options[:do]].flatten : [])\n @before_actions[@watched_sources.last][:conditions] = (options[:conditions].present? ? [options[:conditions]].flatten : [])\n end",
"def before_hooks\n options[:before]\n end",
"def eval_before_hook(locals: {})\n return if @before.blank?\n\n evaluate(@before, locals: locals)\n end",
"def before(*args, &block)\n before_callbacks.concat(args)\n before_callbacks << block if block\n end",
"def before(&block)\n block ? @before = block : @before\n end",
"def before(&block)\n handle(0, &block)\n end",
"def before *actions, &proc\n if proc\n actions = ['*'] if actions.size == 0\n actions.each { |a| @before[a] = proc }\n end\n @before\n end",
"def before\n end",
"def before() nil ; end",
"def before(datum); end",
"def before(datum); end",
"def before method_or_filter, options={}, &block\n _add_filter(:before, method_or_filter, options, block)\n end",
"def before(&block)\n rules.add('@document:before', Sawtooth::Rules::CallRule.new(:start => block)) if block_given?\n end",
"def before\n poller.before_request { |stats| yield(stats) }\n end",
"def before_processing\n end",
"def _Gvn_before(&block)\n _Gvn_before_blocks << block\n end",
"def before_enqueue(*filters, &blk)\n set_callback(:enqueue, :before, *filters, &blk)\n end",
"def before(&block)\n define_before_or_after_method_with_block(:before, &block)\n end",
"def before\n @options[:before]\n end",
"def before(&block)\n if !block_given? || block.arity != 1\n raise InvalidBlockSpecification, \"before proc should have an arity of 1 (Array: values)\"\n end \n @options[:before] = block\n end",
"def valid_before=(_arg0); end",
"def before(node_or_tags); end",
"def before(node_or_tags); end",
"def Before(&proc)\n (@before_procs ||= []) << proc\n end",
"def before(action = nil, options = Hash.new, &block)\n self.before_filters[action || block] = options\n end",
"def before(name,&callback)\n bot.before(name){|*args| callback.call(*args) if enabled}\n end",
"def before(identifier, &block)\n Chef::Sugar::Filters::Injector.new(self, identifier, :before).evaluate(&block)\n end",
"def before_filter_list method, klass\n controller = @tracker.controllers[klass]\n filters = []\n\n while controller\n filters = get_before_filters(method, controller) + filters\n\n controller = @tracker.controllers[controller[:parent]] ||\n @tracker.libs[controller[:parent]]\n end\n\n remove_skipped_filters filters, method, klass\n end",
"def before_running(*args); end",
"def before(*args, &block)\n if block_given?\n Thread.current[:before_hook] = block\n else\n Thread.current[:before_hook].call(*args) if Thread.current[:before_hook]\n end\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def before(*matches, &procedure)\n @_testcase.advice[:before][matches] = procedure\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def before_hook\n return unless before\n\n Logger.info \"Before Hook Starting...\"\n before.call\n Logger.info \"Before Hook Finished.\"\n rescue Exception => err\n @before_hook_failed = true\n ex = err.is_a?(StandardError) ? Error : FatalError\n raise ex.wrap(err, \"Before Hook Failed!\")\n end",
"def before\n return nil if blue_prints.before.empty?\n\n hook_action = HookAction.new(driver,\n blue_prints.before,\n :hook_action, self, location + '[hook_action]')\n\n hook_action.run\n end",
"def before(*hooks)\n self.class.new(relation, **options, before: before_hooks + hooks)\n end",
"def before\n event.user.chapters.where('events.timestamp < ? OR (events.timestamp = ? AND events.\"offset\" < ?)', beginning, beginning, event.offset).joins(:event)\n end",
"def process_before_filter name\n filter = find_method name, @current_class\n\n if filter.nil?\n Brakeman.debug \"[Notice] Could not find filter #{name}\"\n return\n end\n\n method = filter[:method]\n\n if ivars = @tracker.filter_cache[[filter[:controller], name]]\n ivars.each do |variable, value|\n env[variable] = value\n end\n else\n processor = Brakeman::AliasProcessor.new @tracker\n processor.process_safely(method.body_list, only_ivars(:include_request_vars))\n\n ivars = processor.only_ivars(:include_request_vars).all\n\n @tracker.filter_cache[[filter[:controller], name]] = ivars\n\n ivars.each do |variable, value|\n env[variable] = value\n end\n end\n end",
"def before\n if @pkg.has_key? :cmd and @pkg[:cmd].has_key? :before\n logger.info \"Executing before commands...\"\n run :before\n end\n end",
"def precedes; [] end",
"def before(event, &block)\n triggers[:before][event] << block\n self\n end",
"def before_filter(&block)\n @before_filter = block\n end",
"def prepend_before(*args, &proc)\n add_callback(:prepend_before, *args, &proc)\n end",
"def before_process(ant)\n @modifier.before_process(ant)\n end",
"def filters; end",
"def filters; end",
"def check_pre_guard_effects(user, item)\n check_effects(effect_objects, \"pre_guard\", user, item)\n end",
"def before_each(&block)\n @before_each_blocks << block\n end",
"def before_filter(&block)\n @before_filter = block\n end",
"def before_filter_if_not_already_added(method)\n unless filter_already_added? method\n before_filter method\n end\n end",
"def before_run; end",
"def before(*actions, &block)\n before_action = Moonrope::BeforeAction.new(@controller)\n before_action.block = block\n before_action.actions = actions\n @controller.befores << before_action\n before_action\n end",
"def before(type=:each, &block)\n raise ArgumentError, \"invalid before-type #{type}\" unless [:each, :all].include?(type)\n type_method = \"before_#{type}\"\n remove_method(type_method) rescue nil #if method_defined?(type_method)\n define_method(type_method, &block)\n end",
"def verify_not_before; end",
"def verify_not_before; end",
"def conditions; end",
"def conditions; end",
"def before_callbacks\n @before_callbacks ||= []\n end",
"def before(observation, &block)\n build_methods(:before, observation, &block)\n end",
"def before(selector, &block)\n add_pointcut BeforePointcut.new(selector, &block)\n end",
"def enforce_filter_order(filters)\n if self.class.const_defined?(:FILTER_ORDER)\n self.class.const_get(:FILTER_ORDER).map do |field|\n filters.detect { |f| f.field.to_s == field }\n end.compact\n else\n filters\n end\n end",
"def before_dispatch(env); end",
"def ts_apply_filters\n # TODO: Make filters for Thinking Sphinx\n end",
"def pre_filter\n @records = records\n .select {|event| event[:lang] != \"en\" }\n .select {|event| event[:reply_to].nil? }\n .select {|event| event[:text] =~ /^[0-9]{4}/ }\n end",
"def pre_execute(&block)\n @hooks[:pre_execute] = block\n end",
"def find_before(value)\n end",
"def before(name, &block)\n before_steps[name] ||= EMPTY_ARRAY.dup\n before_steps[name] << Step.new(type: :before, name: name, executor: block)\n self\n end",
"def apply_filter\n end",
"def precheck\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def strict_filters; end",
"def before(name, options={}, &block)\n self.add_block_container_to_list(\"before_#{name.to_s}\", options, &block)\n nil\n end",
"def by_created_at_before(events)\n return events unless params[:before]\n\n events.where('events.created_at < ?', params[:before].beginning_of_day)\n end",
"def _eval_pre\n _eval_action('pre')\n end",
"def before(identifier, &block)\n be_callable!(identifier) unless identifier.is_a?(Symbol)\n be_callable!(block)\n synchronize do\n if applied?\n raise Error::ApplyError,\n \"Hooks have already been applied to stack\"\n end\n @before_entries = before_entries +\n [Entry.new(identifier: identifier,\n action: Action.new(stack: stack, callable: block))]\n @before_entries.freeze\n end\n self\n end",
"def run_before_each_hooks( env )\n env[:hooks][:before_each].each { |hook| hook.call }\n end",
"def before?(inclusive, date, event)\n if (inclusive)\n event[:timestamp] <= date\n else\n event[:timestamp] < date\n end\nend",
"def before(name, &block)\n validate_step_name(name)\n before_steps[name] ||= EMPTY_ARRAY.dup\n before_steps[name] << block.to_proc\n self\n end",
"def before_inclusion(&block)\n if block && block.respond_to?(:call)\n before_inclusion_procs << block\n else\n raise 'Anything added to be called before_inclusion must be callable (respond to #call).'\n end\n end",
"def before_process(ant)\n end",
"def pre\n if phase.has_key?('pre')\n execute(\"pre\", phase['pre'])\n end\n end",
"def append_before(*args, &proc)\n add_callback(:append_before, *args, &proc)\n end",
"def filter_parameters; end"
] | [
"0.7641463",
"0.6953068",
"0.6872635",
"0.65766823",
"0.6538966",
"0.65335405",
"0.6519498",
"0.65084827",
"0.6483308",
"0.6453054",
"0.64445287",
"0.6424519",
"0.6411271",
"0.64094234",
"0.63996077",
"0.63953644",
"0.63746446",
"0.6333937",
"0.6311281",
"0.6302508",
"0.6248734",
"0.6238438",
"0.6229089",
"0.6185557",
"0.61812335",
"0.61664104",
"0.6153639",
"0.61193836",
"0.61193836",
"0.61045736",
"0.60696167",
"0.60646915",
"0.60489136",
"0.6047652",
"0.60274434",
"0.6007418",
"0.60026515",
"0.5961913",
"0.5945811",
"0.59445715",
"0.59445715",
"0.5933777",
"0.59330684",
"0.5927121",
"0.5906972",
"0.59066826",
"0.5893754",
"0.5891994",
"0.58917946",
"0.5888971",
"0.588426",
"0.58820355",
"0.58813393",
"0.5878122",
"0.585919",
"0.57750934",
"0.57696015",
"0.57506573",
"0.5750308",
"0.57374215",
"0.56913066",
"0.56475985",
"0.5646011",
"0.5646011",
"0.5633418",
"0.56276524",
"0.5619103",
"0.56055105",
"0.55929244",
"0.5591092",
"0.5576521",
"0.5574219",
"0.5574219",
"0.55675066",
"0.55675066",
"0.55572605",
"0.55544007",
"0.5542928",
"0.5541429",
"0.5540498",
"0.55262285",
"0.5522373",
"0.5522249",
"0.551875",
"0.55183244",
"0.55090845",
"0.5502857",
"0.54970986",
"0.5489476",
"0.54888165",
"0.54729867",
"0.547241",
"0.54720503",
"0.5463611",
"0.5462508",
"0.5455521",
"0.5455282",
"0.5452466",
"0.5450892",
"0.5447476",
"0.54439837"
] | 0.0 | -1 |
Never trust parameters from the scary internet, only allow the white list through. | def item_params
params.require(:item).permit(:name, :price, :quantity, :description, :image, :tag_list)
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def strong_params\n params.require(:user).permit(param_whitelist)\n end",
"def strong_params\n params.require(:listing_member).permit(param_whitelist)\n end",
"def allow_params_authentication!; end",
"def allowed_params\n ALLOWED_PARAMS\n end",
"def default_param_whitelist\n [\"mode\"]\n end",
"def param_whitelist\n [:role, :title]\n end",
"def expected_permitted_parameter_names; end",
"def safe_params\n params.except(:host, :port, :protocol).permit!\n end",
"def strong_params\n params.require(:team_member).permit(param_whitelist)\n end",
"def permitir_parametros\n \t\tparams.permit!\n \tend",
"def strong_params\n params.require(:community).permit(param_whitelist)\n end",
"def permitted_strong_parameters\n :all #or an array of parameters, example: [:name, :email]\n end",
"def strong_params\n params.require(:education).permit(param_whitelist)\n end",
"def restricted_params\n #params.require(self.controller_name.classify.underscore.to_sym).permit([])\n raise(\"No strong params set, override restricted_params method in your controller. E.g. params.require(:model).permit(:attribute1, :attribute2)\")\n end",
"def allowed_params\n params.require(:user).permit(:username, :email, :password, :password_confirmation)\n end",
"def param_whitelist\n [:rating, :review]\n end",
"def param_whitelist\n whitelist = [\n :username, :name,\n :parent_id,\n :headline, :description, :video,\n :policy, :signup_mode, :category,\n :website, :facebook, :twitter, :linkedin,\n :founded_at,\n privacy: [\n :events,\n :resources\n ],\n permission: [\n :profile,\n :members,\n :children,\n :statistics,\n :posts,\n :listings,\n :resources,\n :events\n ],\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:parent_id)\n unless current_user.role_in(@community) === 'owner'\n whitelist.delete(:privacy)\n whitelist.delete(:permission)\n end\n end\n \n whitelist\n end",
"def param_whitelist\n if @user.present? && current_user != @user\n return [:followed]\n end\n \n whitelist = [\n :username, :email, :password,\n :first_name, :last_name,\n :birthday, :gender,\n :headline, :biography, :ask_about, :focus,\n :website, :facebook, :linkedin, :twitter, :github,\n roles: [],\n skills: [],\n interests: [],\n privacy: { contact: [] },\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n \n if action_name === 'update'\n whitelist.delete(:email)\n whitelist.delete(:password)\n end\n \n whitelist\n end",
"def user_params \n \tparams.require(:user).permit(:name, :email, :password, :password_confirmation)# preventing CSTR\n end",
"def valid_params_request?; end",
"def user_params\n params.permit(:name, :phoneNumber, :address, :postalCode, :local, :link, :counter, :latitude, :longitude) \n end",
"def strong_params\n params.require(:experience).permit(param_whitelist)\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def whitelist_url_params\n params.require(:whitelist_url).permit(:domain)\n end",
"def allowed_params\n params.require(:allowed).permit(:email)\n end",
"def permitted_params\n []\n end",
"def trim_whitelisted(params, whitelist)\n # remove any parameters that are not whitelisted\n params.each do |key, value|\n # if white listed\n if whitelist.include? key\n # strip the parameters of any extra spaces, save as string\n params[key] = value.to_s.strip\n else\n # delete any unauthorized parameters\n params.delete key\n end\n end\n params\n end",
"def safe_params\n params.permit(:id, :name, :origin, :emails => []); #emails is an array\n end",
"def query_param\n\t\tparams.permit(:first_name, :last_name, :phone)\n\tend",
"def strong_params\n params.require(:success_metric).permit(param_whitelist)\n end",
"def devise_filter\r\n logger.debug(\"In devise_filter =>PARAMS: #{params.inspect}\")\r\n\r\n # White list for sign_up\r\n devise_parameter_sanitizer.for(:sign_up) { |u| u.permit(user_whitelist) }\r\n\r\n # White list for account update\r\n devise_parameter_sanitizer.for(:account_update) { |u| u.permit(user_whitelist, :current_password) }\r\n\r\n # White list for Invitation creation\r\n devise_parameter_sanitizer.for(:invite) { |u| u.permit(:account_type, :email, :invitation_token)}\r\n\r\n # White list for accept invitation\r\n devise_parameter_sanitizer.for(:accept_invitation) { |u| u.permit(user_whitelist, :invitation_token)}\r\n\r\n end",
"def whitelisted_user_params\n params.require(:user).\n permit( :first_name, :last_name, :email,:password,:password_confirmation,:birthday,:gender)\n end",
"def user_params\n ActionController::Parameters.permit_all_parameters = true\n params.require(:user) #.permit(:name, :surname, :phone, :password, :email, :time_zone)\n end",
"def safe_params\n params.require(:user).permit(:name)\n end",
"def strong_params\n params.require(:metric_change).permit(param_whitelist)\n end",
"def get_params\n\t\treturn ActionController::Parameters.new(self.attributes).permit(\"account_id\", \"title\", \"category\", \"introduction\", \"tags\", \"segment_type\", \"visible\", \"status\", \"main_image\")\n\tend",
"def grant_params\n @whitelisted = params.require(:grant).permit(:name, :description, :agency_id, :acronym)\n end",
"def check_params; true; end",
"def param_whitelist\n whitelist = [\n :description,\n :progress,\n :kpi_id\n ]\n \n unless action_name === 'create'\n whitelist.delete(:kpi_id)\n end\n \n whitelist\n end",
"def quote_params\n params.permit!\n end",
"def valid_params?; end",
"def paramunold_params\n params.require(:paramunold).permit!\n end",
"def user_params\n\t\tparams.permit(:nickname, :avatar, :description, :password, :gender, :birthday, :email, :phone, :qq_id, :wechat_id)\n\tend",
"def filtered_parameters; end",
"def user_params\n params.permit(\n \t:id,\n \t:email, \n \t:first_name, \n \t:last_name, \n \t:password, \n \t:confirm_token, \n \t:phone_number,\n \t:facebook_link,\n \t:car_model,\n \t:license_plate)\n end",
"def filtering_params\n params.permit(:email, :name)\n end",
"def check_params\n true\n end",
"def wx_public_params\n params.require(:wx_public).permit(:nickname, :manager, :alias)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def allowed_params\n params.require(:user).permit(:email, :password, :role, :first_name, :last_name, :password_confirmation)\n end",
"def listing_params\n\t\tparams.permit(:address, :transit_info, :rules, :other_info, :lat, :lng)\n\tend",
"def social_account_params\n\t\t\tparams.require(:social_account).permit!\n\t\tend",
"def safe_params\n resurce_name = self.class.resource_name\n params_method_name = \"#{resurce_name}_params\".to_sym\n if params[resurce_name]\n if respond_to?(params_method_name) || private_methods.include?(params_method_name)\n send(params_method_name)\n else\n raise ActiveModel::ForbiddenAttributesError, \"Please, define the '#{params_method_name}' method in #{self.class.name}\"\n end\n end\n end",
"def user_params\n params.require(:user).permit(:uri, :username, :password, :realname, :email, :publicvisible)\n end",
"def url_params\n params.require(:url).permit(:short_url, :original_url, :clicks, :ip_addresses)\n end",
"def model_params\n\t\tparams.require(:manager).permit(\n\t :user_name,\n :password,\n :email,\n \t\t\t)\n\tend",
"def article_params_whitelist\n params.require(:article).permit(:title, :description, category_ids: [])\n end",
"def college_whitelist_params\n params.require(:college_whitelist).permit(:status)\n end",
"def active_code_params\n params[:active_code].permit\n end",
"def filtering_params\n params.permit(:email)\n end",
"def ip_address_params\n\t\t\tparams.require(:ip_address).permit!\n end",
"def valid_params(params)\n params.permit(:user_id, :photo_id, :originX, :originY, :width, :height)\n end",
"def reserved_params\n params.require(:reserved).permit(:name, :email, :pax, :address, :KTP, :title)\n end",
"def pull_request_params\n whitelist = [\n :url,\n :id,\n :html_url,\n :diff_url,\n :patch_url,\n :issue_url,\n :number,\n :state,\n :locked,\n :title\n ]\n params.require(:pull_request).permit(whitelist)\n end",
"def post_params\n if current_user.admin? \n params.permit(:title, :body, :city, :country, :gps_location, :privacy, :visible, :latitude, :longitude, images: [], files: [])\n else \n params.permit(:title, :body, :city, :country, :gps_location, :privacy,:latitude, :longitude, images: [], files: [])\n end \n end",
"def list_params\n params.permit(:name)\n end",
"def filter_parameters; end",
"def filter_parameters; end",
"def vineyard_params\n params.permit(:vineyard_name, :email, :website_url, :phone, :address, :city, :region, :postcode, :country, :specialty, :description, :pet_friendly, :holiday, :tours, :events, :family_friendly, :cover_image, :image_one, :image_two, :image_three, :image_four, :user_id, :base64)\n end",
"def available_activity_params\n # params.require(:available_activity).permit(:type,:geometry,:properties)\n whitelisted = ActionController::Parameters.new({\n type: params.require(:available_activity)[:type],\n geometry: params.require(:available_activity)[:geometry].try(:permit!).to_h,\n properties: params.require(:available_activity)[:properties].try(:permit!).to_h\n }).try(:permit!)\n end",
"def user_params\n params.permit(:name, :username, :email, :password, :img_url, :bg_url, :coinbank)\n end",
"def user_params_pub\n\t \tparams[:user].permit(:hruid)\n\t end",
"def user_params\n params.permit(:id, :email, :password, :nickname, :status, :avatar, :flat_picture, :flatsharing_id, :member,\n :user, :color, :solde)\n end",
"def validate_search_inputs\n @whitelisted = params.fetch(:user, nil)\n if @whitelisted.blank?\n render_error(400, \"#{I18n.t('general_error.params_missing_key')}\": [I18n.t('general_error.params_missing_value', model: \"review\")])\n return\n else\n @whitelisted = @whitelisted.permit(:name, :uen, :description)\n end\n end",
"def url_whitelist; end",
"def param_whitelist\n [\n :title,\n :description,\n :organization,\n :team_id,\n :started_at,\n :finished_at,\n location: [\n :description,\n :street,\n :city,\n :state,\n :zip,\n :country,\n :latitude,\n :longitude\n ]\n ]\n end",
"def admin_social_network_params\n params.require(:social_network).permit!\n end",
"def filter_params\n params.require(:filters).permit(:letters)\n end",
"def valid_params(params)\n params.permit(:login, :first_name, :last_name, \n :password, :password_confirmation)\n end",
"def origin_params\n params.permit(:country, :state, :city, :postal_code, :address, :description)\n end",
"def permit_request_params\n params.permit(:address)\n end",
"def sensitive_params=(params)\n @sensitive_params = params\n end",
"def user_params\n # Ensure a user can't give themselves admin priveleges\n params.delete(:admin) if current_user.admin?\n params.require(:user).permit(:name, :email, :admin, :image)\n end",
"def secure_params\n params.require(:location).permit(:name)\n end",
"def strong_params\n params.require( :setting ).\n permit( :global_scan_limit, :per_user_scan_limit,\n :target_whitelist_patterns, :target_blacklist_patterns )\n end",
"def question_params\n params.require(:survey_question).permit(question_whitelist)\n end",
"def case_insensitive_params\n params.require(:case_insensitive).permit(:name)\n end",
"def empire_master_no_match_params\n params.require(:empire_master_no_match).permit(:uid, :last_name, :list, :search_date, :double, :source)\n end",
"def maintenance_request_params\n params[:maintenance_request].permit! #allow all parameters for now\n end",
"def unwanted_params\n params.require(:unwanted).permit(:title, :description, :image)\n end",
"def backend_user_params\n params.permit!\n end",
"def url_params\n params[:url].permit(:full)\n end",
"def filter_params\n\t\treturn params[:candidate].permit(:name_for_filter)\n\tend",
"def user_params\n params.permit(:name, :age, :username, :display_photo, :password)\n end",
"def speed_measurement_params\n\n #fuckit, to lazy to deal with permit crap right now\n ActionController::Parameters.permit_all_parameters = true\n\n params[:speed_measurement]\n end",
"def get_params\r\n #params.require(:article).permit(:title, :permalink, :content, :source_site, :introtext, :type_id, :order_by, :searchable, :created_by, :edited_by, :published_by, :published_on, :user_id)\r\n params.require(:article).permit!\r\n\r\n end",
"def pub_params\n params.require(:pub).permit(:name, :description, :phone, :email, :hidden, :city_id, :address)\n end",
"def pass_params\n params[:pass].permit(:name, :price, :description, :colour, :events)\n end",
"def droptraining_params\n params.permit(:training_id,:user_id, :utf8, :authenticity_token, :commit)\n end",
"def person_params\n # params whitelist does *not* include admin, sub, remember_token\n # TBD: share this whitelist with the list used by configuration_permitted_parameters\n # TBD: should current_password be on this list? -- for now, leaving off, since it seems to work without\n # NOTE: do not include 'admin' in this list!\n params.require(:person).permit(\n :name, \n :email, \n :description,\n :password, \n :password_confirmation\n )\n end",
"def parameter_params\n params.require(:parameter).permit(:name, :description, :param_code, :param_value, :active_from, :active_to)\n end"
] | [
"0.6981273",
"0.6783789",
"0.67460483",
"0.6742222",
"0.67354137",
"0.65934366",
"0.65028495",
"0.6497783",
"0.64826745",
"0.6479415",
"0.6456823",
"0.6440081",
"0.63800216",
"0.6376521",
"0.636652",
"0.6319898",
"0.6300256",
"0.62994003",
"0.6293621",
"0.6292629",
"0.6291586",
"0.629103",
"0.6282451",
"0.6243152",
"0.62413",
"0.6219024",
"0.6213724",
"0.62103724",
"0.61945",
"0.61786324",
"0.61755824",
"0.6173267",
"0.6163613",
"0.6153058",
"0.61521065",
"0.6147508",
"0.61234015",
"0.61168665",
"0.6107466",
"0.6106177",
"0.6091159",
"0.60817343",
"0.6071238",
"0.6062299",
"0.6021663",
"0.60182893",
"0.6014239",
"0.6011563",
"0.60080767",
"0.60080767",
"0.60028875",
"0.60005623",
"0.59964156",
"0.5993086",
"0.5992319",
"0.5992299",
"0.59801805",
"0.59676576",
"0.59606016",
"0.595966",
"0.59591126",
"0.59589803",
"0.5954058",
"0.5953234",
"0.5944434",
"0.5940526",
"0.59376484",
"0.59376484",
"0.5935253",
"0.5930846",
"0.5926387",
"0.59256274",
"0.5917907",
"0.5910841",
"0.590886",
"0.59086543",
"0.59060425",
"0.58981544",
"0.5898102",
"0.5896809",
"0.5895416",
"0.58947027",
"0.58923644",
"0.5887903",
"0.58830196",
"0.5880581",
"0.5873854",
"0.58697754",
"0.5869004",
"0.58669055",
"0.5866886",
"0.58664906",
"0.5864619",
"0.58630043",
"0.5862495",
"0.5861368",
"0.5859712",
"0.5855544",
"0.58551925",
"0.5851284",
"0.5850602"
] | 0.0 | -1 |
Use callbacks to share common setup or constraints between actions. | def set_store
if !params[:store_id].nil?
@store = Store.find(params[:store_id])
else
@store = @item.store
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
Use callbacks to share common setup or constraints between actions. | def set_item
if !params[:item_id].nil?
@item = Item.find(params[:item_id])
else
@item = Item.find(params[:id])
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_required_actions\n # TODO: check what fields change to asign required fields\n end",
"def action_hook; end",
"def run_actions; end",
"def define_action_hook; end",
"def actions; end",
"def define_action_helpers\n if super && action == :save\n @instance_helper_module.class_eval do\n define_method(:valid?) do |*args|\n self.class.state_machines.fire_event_attributes(self, :save, false) { super(*args) }\n end\n end\n end\n end",
"def add_actions; end",
"def callbacks; end",
"def callbacks; end",
"def setup *actions, &proc\n (@setup_procs ||= []) << [proc, actions.size > 0 ? actions : [:*]]\n end",
"def define_action_helpers; end",
"def post_setup\n end",
"def action_methods; end",
"def action_methods; end",
"def action_methods; end",
"def before_setup; end",
"def action_run\n end",
"def execute(setup)\n @action.call(setup)\n end",
"def define_action_helpers?; end",
"def set_actions\n actions :all\n end",
"def action_done(action)\n dispatch = { :migrate => :done_migrating, :map => :done_mapping, :reduce =>\n :done_reducing, :finalize => :done_finalizing } \n self.send dispatch[action[:action]], action\n end",
"def dependencies action, &block\n @actions.each do |other|\n if action[:requires].include? other[:provide]\n block.call other\n end\n end\n end",
"def setup!\n return unless @setup_procs\n http_actions = actions\n @setup_procs.each do |setup_proc|\n proc, actions = setup_proc\n @setup__actions = actions.map do |action|\n\n action.is_a?(Regexp) ?\n http_actions.select { |a| a.to_s =~ action } :\n action.is_a?(String) && action =~ /\\A\\./ ?\n http_actions.map { |a| a.to_s << action if format?(a).include?(action) }.compact :\n action\n\n end.flatten\n self.class_exec &proc\n @setup__actions = nil\n end\n @setup_procs = nil\n end",
"def before_actions(*logic)\n self.before_actions = logic\n end",
"def setup_handler\n end",
"def set_action(opts)\n opts = check_params(opts,[:actions])\n super(opts)\n end",
"def setup(action)\n @targets.clear\n unless action.item.target_filters.empty?\n @targets = SES::TargetManager.make_targets(action)\n else\n item = action.item\n if item.for_opponent?\n @targets = $game_troop.alive_members\n elsif item.for_dead_friend?\n @targets = $game_party.battle_members.select { |actor| actor.dead? }\n else\n $game_party.battle_members.select { |actor| actor.alive? }\n end\n end\n @item_max = @targets.size\n create_contents\n refresh\n show\n activate\n end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def action; end",
"def workflow\n end",
"def revisable_shared_setup(args, block)\n class << self\n attr_accessor :revisable_options\n end\n options = args.extract_options!\n self.revisable_options = Options.new(options, &block)\n \n self.send(:include, Common)\n self.send(:extend, Validations) unless self.revisable_options.no_validation_scoping?\n self.send(:include, WithoutScope::QuotedColumnConditions)\n end",
"def setup\n @action = SampleActionAndroid.new(os_name: 'android',\n app_name: APP_PATH)\n end",
"def before(action)\n invoke_callbacks *self.class.send(action).before\n end",
"def process_action(...)\n send_action(...)\n end",
"def before_dispatch(env); end",
"def after_actions(*logic)\n self.after_actions = logic\n end",
"def setup\n # override and do something appropriate\n end",
"def setup(client)\n return unless @setup\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n actions.each do |action|\n action.execute(client)\n end\n self\n end",
"def setup(_context)\n end",
"def setup(resources) ; end",
"def validate_actions\n errors.add(:base, :should_give_at_least_one_action) if !manage? && !forecasting? && !read? && !api?\n end",
"def setup\n @resource_config = {\n :callbacks => {\n :before_create => nil,\n :after_create => nil,\n :before_update => nil,\n :after_update => nil,\n :before_destroy => nil,\n :after_destroy => nil,\n },\n :child_assoc => nil,\n :model => nil,\n :parent => nil,\n :path => nil,\n :permission => {},\n :properties => {},\n :relation => {\n :create => nil,\n :delete => nil,\n },\n :roles => nil,\n }\n end",
"def determine_valid_action\n\n end",
"def process_shared\n handle_taxes\n handle_shippings\n create_adjustments_from_params\n handle_status\n handle_inventory_refunds\n handle_payment_transactions\n order.updater.update\n end",
"def startcompany(action)\n @done = true\n action.setup\n end",
"def init_actions\n am = action_manager()\n am.add_action(Action.new(\"&Disable selection\") { @selection_mode = :none; unbind_key(32); bind_key(32, :scroll_forward); } )\n am.add_action(Action.new(\"&Edit Toggle\") { @edit_toggle = !@edit_toggle; $status_message.value = \"Edit toggle is #{@edit_toggle}\" })\n end",
"def event_callbacks(event, metadata={})\n case event\n when :reset, :review\n if confirmed\n update_attributes(confirmed: false)\n end\n when :confirm\n confirm\n # trigger :order for all applicable items\n # NOTE: :order event is common to both physical and digital items\n items.each do |i|\n if i.event_permitted(:order)\n user_id = last_transition.user_id\n i.trigger!(:order, { order_id: id, user_id: user_id })\n end\n end\n when :complete_work\n request = metadata[:request]\n work_complete_notification(request)\n when :close\n close\n end\n if event != :close && !open\n reopen\n end\n end",
"def setup_action\n return unless PONY::ERRNO::check_sequence(current_act)\n new_sequence = @action_sequence[@sequence_index+1...@action_sequence.size]\n @sequence_index = 0\n new_sequence = DND::SkillSequence::ACTS[@acts[1]] + new_sequence\n execute_sequence\n end",
"def define_tasks\n define_weave_task\n connect_common_tasks\n end",
"def setup(&block)\n define_method(:setup, &block)\n end",
"def setup\n transition_to(:setup)\n end",
"def setup\n transition_to(:setup)\n end",
"def action\n end",
"def setup( *args )\n\t\t\tself.class.setupBlocks.each {|sblock|\n\t\t\t\tdebugMsg \"Calling setup block method #{sblock}\"\n\t\t\t\tself.send( sblock )\n\t\t\t}\n\t\t\tsuper( *args )\n\t\tend",
"def config(action, *args); end",
"def setup\n @setup_proc.call(self) if @setup_proc\n end",
"def before_action \n end",
"def setup_callbacks\n defined_callbacks.each do |meth|\n unless respond_to?(\"call_#{meth}_callbacks\".to_sym)\n self.class.module_eval <<-EOE\n def call_#{meth}_callbacks(*args)\n plugin_store.each {|a| a.call_#{meth}_callbacks(*args) } if respond_to?(:plugin_store) && plugin_store\n self.send :#{meth}, *args if respond_to?(:#{meth})\n end\n EOE\n end\n end\n end",
"def action\n end",
"def matt_custom_action_begin(label); end",
"def setup\n # override this if needed\n end",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def setup\n\t\t\t\t\t\t# Do nothing\n\t\t\t\tend",
"def action(options,&callback)\n new_action = Action===options ? options : Action.new(options,&callback)\n # replace any with (shared name/alias or both default) + same arity\n @actions.delete_if do |existing_action|\n ((existing_action.names & new_action.names).size > 0 ||\n existing_action.default? && new_action.default?) &&\n existing_action.required.size == new_action.required.size &&\n existing_action.optional.size <= new_action.optional.size\n end\n @actions = (@actions + [new_action]).sort\n new_action\n end",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action\n end",
"def after(action)\n invoke_callbacks *options_for(action).after\n end",
"def pre_task\n end",
"def setup(server)\n server.on('beforeMethod', method(:before_method), 10)\n end",
"def add_actions\n attribute = machine.attribute\n name = self.name\n \n owner_class.class_eval do\n define_method(name) {self.class.state_machines[attribute].events[name].fire(self)}\n define_method(\"#{name}!\") {self.class.state_machines[attribute].events[name].fire!(self)}\n define_method(\"can_#{name}?\") {self.class.state_machines[attribute].events[name].can_fire?(self)}\n end\n end",
"def init_actions\n @select_action = SelectAction.new\n @endpoint_mouse_action = EndpointMouseAction.new\n @move_action = MoveAction.new\n end",
"def setup_signals; end",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def after_created\r\n return unless compile_time\r\n Array(action).each do |action|\r\n run_action(action)\r\n end\r\nend",
"def set_target_and_action target, action\n self.target = target\n self.action = 'sugarcube_handle_action:'\n @sugarcube_action = action.respond_to?('weak!') ? action.weak! : action\n end",
"def initialize(*args)\n super\n @action = :set\nend",
"def after_set_callback; end",
"def setup\n #implement in subclass;\n end",
"def lookup_action; end",
"def setup &block\n if block_given?\n @setup = block\n else\n @setup.call\n end\n end",
"def setup_action\n return TSBS.error(@acts[0], 1, @used_sequence) if @acts.size < 2\n actions = TSBS::AnimLoop[@acts[1]]\n if actions.nil?\n show_action_error(@acts[1])\n end\n @sequence_stack.push(@acts[1])\n @used_sequence = @acts[1]\n actions.each do |acts|\n @acts = acts\n execute_sequence\n break if @break_action\n end\n @sequence_stack.pop\n @used_sequence = @sequence_stack[-1]\n end",
"def release_actions; end",
"def around_hooks; end",
"def save_action; end",
"def setup(easy)\n super\n easy.customrequest = @verb\n end",
"def action_target()\n \n end",
"def setup\n callback(:setup) do\n notify(:setup)\n migration_check.last_deployed_commit\n end\n end",
"def setup\n return unless @setup\n\n actions = @setup['setup'].select { |action| action['do'] }.map { |action| Action.new(action['do']) }\n run_actions_and_retry(actions)\n self\n end",
"def before_setup\n # do nothing by default\n end",
"def my_actions(options)\n @setup = false\n get_template_part(\"custom_used\",\"action_users\",true)\n end",
"def default_action; end",
"def setup(&blk)\n @setup_block = blk\n end",
"def callback_phase\n super\n end",
"def advice\n end",
"def _handle_action_missing(*args); end",
"def duas1(action)\n action.call\n action.call\nend",
"def shared_action(name, &block)\n @controller.shared_actions[name] = block\n end",
"def before_action action, &block\n @audience[:before][action] ||= Set.new\n @audience[:before][action] << block\n end",
"def setup_initial_state\n\n state_a = State.new(\"a\", 0)\n state_b = State.new(\"b\", 0)\n state_c = State.new(\"c\", 10)\n\n move_to_b = Action.new(\"move_to_b\", 1, state_b)\n\n move_to_c = Action.new(\"move_to_c\", 1, state_c)\n\n state_a.actions = [move_to_b, move_to_c]\n\n return state_a\n \nend"
] | [
"0.6163163",
"0.6045976",
"0.5946146",
"0.591683",
"0.5890051",
"0.58349305",
"0.5776858",
"0.5703237",
"0.5703237",
"0.5652805",
"0.5621621",
"0.54210985",
"0.5411113",
"0.5411113",
"0.5411113",
"0.5391541",
"0.53794575",
"0.5357573",
"0.53402257",
"0.53394014",
"0.53321576",
"0.53124547",
"0.529654",
"0.5296262",
"0.52952296",
"0.52600986",
"0.52442724",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.52385926",
"0.5232394",
"0.523231",
"0.5227454",
"0.52226824",
"0.52201617",
"0.5212327",
"0.52079266",
"0.52050185",
"0.51754695",
"0.51726824",
"0.51710224",
"0.5166172",
"0.5159343",
"0.51578903",
"0.51522785",
"0.5152022",
"0.51518047",
"0.51456624",
"0.51398855",
"0.5133759",
"0.5112076",
"0.5111866",
"0.5111866",
"0.5110294",
"0.5106169",
"0.509231",
"0.50873137",
"0.5081088",
"0.508059",
"0.50677156",
"0.50562143",
"0.5050554",
"0.50474834",
"0.50474834",
"0.5036181",
"0.5026331",
"0.5022976",
"0.5015441",
"0.50121695",
"0.5000944",
"0.5000019",
"0.4996878",
"0.4989888",
"0.4989888",
"0.49864885",
"0.49797225",
"0.49785787",
"0.4976161",
"0.49683493",
"0.4965126",
"0.4958034",
"0.49559742",
"0.4954353",
"0.49535993",
"0.4952725",
"0.49467874",
"0.49423352",
"0.49325448",
"0.49282882",
"0.49269363",
"0.49269104",
"0.49252945",
"0.4923091",
"0.49194667",
"0.49174926",
"0.49173003",
"0.49171105",
"0.4915879",
"0.49155936"
] | 0.0 | -1 |
ensure that there are no line items referencing this product | def ensure_not_referenced_by_any_line_item
if line_items.empty?
return true
else
errors.add(:base, 'Line Items present')
return false
end
end | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ensure_not_referenced_by_any_line_item\n\t\t\tunless line_items.empty?\n\t\t\t\terrors.add(:base, 'Line items reference this product')\n\t\t\t\tthrow :abort\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item_product\n unless line_item_products.empty?\n errors.add(:base, 'Line Items Products present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n raise Error.new \"Line Items present\"\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tunless line_items.empty?\n\t\t\terrors.add(:base, 'Line Items Presents')\n\t\t\tthrow :abort\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Product sedang di referensikan oleh Line Item')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tunless line_items.empty?\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\tthrow :abort\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item #in this case before destroy a row in the database\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Itens present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n return if line_items.empty?\n\n errors.add(:base, \"Line Items present\")\n throw :abort\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items Present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors[:base] << \"Line Items Prsent\"\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\r\n\t\tunless line_items.empty?\r\n\t\t\terrors.add(:base, 'Line Items Present')\r\n\t\t throw :abort\r\n\t\tend\r\n\t\r\n\tend",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item # hook method (a method that Rails calls automatically at a given point in an object’s life)\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, \"Line Items present\")\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Istnieja powiazania z Line Items')\n return false;\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n \tif line_items.empty?\n \t\treturn true\n \telse\n \t\terrors.add(:base, 'Line Items Present')\n \t\treturn false\n \tend\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\telse\n\t\t\terrors[:base] << \"Line Items present\"\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base,'Line Items present') #We associate errors with the base object\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_items\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items Present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.emty?\n\t\t\treturn true\n\t\tesle\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item \n \tif line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true \n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false \n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.count.zero?\n\t\t\treturn true\n\t\t\telse\n\t\t\terrors.add(:base, 'Line Items present' )\n\t\t\treturn false\n\t\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.empty?\n \t return true\n \telse\n \t errors.add(:base, 'Line Items present' )\n \treturn false\n \tend\n end",
"def ensure_not_referenced_by_any_line_item\n \tif line_items.empty?\n \t\treturn true\n \telse\n \t\terrors.add(:base, 'Line Items present')\n \t\treturn false\n \tend \t\t\n end",
"def ensure_not_referenced_by_any_line_item \n\t\tif line_items.empty?\n\t\t\treturn true \n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.empty?\n\treturn true\n\telse\n\terrors.add(:base, 'Line Items present')\n\treturn false\n\tend\n\tend",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n\t return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item \n if line_items.empty?\n return true \n else\n errors.add(:base, 'Line Items present')\n return false \n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_item1s.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t return true\n\t \telse\n\t \t errors.add(:base, 'Line Items present')\n\t return false\n\t end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\t\tend\n\t\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\tif line_items.count.zero?\n\treturn true\n\telse\n\terrors[:base] << \"Line Items present\"\n\treturn false\n\tend\nend",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line Items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n \t if line_items.empty?\n \t \treturn true\n \t else\n \t \terrors.add(:base, 'Line items present')\n \t \treturn false\n \t end\n \tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\" #这是什么意思\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n\t\t\tif line_items.empty?\n\t\t\t\treturn true\n\t\t\telse\n\t\t\t\terrors.add(:base, 'Line Items present')\n\t\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n if line_items.count.zero?\n return true\n else\n errors[:base] << \"Line Items present\"\n return false\n end \n end",
"def ensure_not_referenced_by_any_line_item\n\t\tif line_items.empty?\n\t\t\treturn true\n\t\telse\n\t\t\terrors.add(:base, 'Line items present')\n\t\t\treturn false\n\t\tend\n\tend",
"def ensure_not_referenced_by_any_line_item\n\t if line_items.count.zero?\n\t\t return true\n\t else\n\t\t errors[:base] << \"Line Items present\"\n\t\t return false\n\t end\n\tend",
"def ensure_not_referenced_by_any_line_item\n unless line_items.empty?\n errors.add(:base, 'Line Items present')\n throw :abort\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end",
"def ensure_not_referenced_by_any_line_item\n if line_items.empty?\n return true\n else\n errors.add(:base, 'Line Items present')\n return false\n end\n end"
] | [
"0.8736616",
"0.853958",
"0.8289557",
"0.8246923",
"0.82232034",
"0.82068616",
"0.8206619",
"0.81594956",
"0.81327915",
"0.8104883",
"0.81018025",
"0.80855167",
"0.8076766",
"0.8076766",
"0.80568",
"0.8037973",
"0.8020351",
"0.8020351",
"0.8020351",
"0.80173016",
"0.8011286",
"0.8010101",
"0.80063355",
"0.7984239",
"0.7982983",
"0.7972746",
"0.7972746",
"0.796831",
"0.796309",
"0.79618263",
"0.79607385",
"0.79537976",
"0.7946839",
"0.7944792",
"0.79428923",
"0.7942568",
"0.79402906",
"0.793406",
"0.793406",
"0.793406",
"0.7926673",
"0.7919657",
"0.7916912",
"0.7916262",
"0.7914657",
"0.79127026",
"0.791117",
"0.79110795",
"0.7910755",
"0.79104286",
"0.7909076",
"0.7904992",
"0.79046524",
"0.79046524",
"0.79046524",
"0.79046524",
"0.79046524",
"0.79046524",
"0.79046524",
"0.79024816",
"0.7898809",
"0.7895409",
"0.78943604",
"0.78941166",
"0.78941166",
"0.78941166",
"0.78941166",
"0.78941166",
"0.78941166",
"0.78941166",
"0.7892181",
"0.78880906",
"0.7884762",
"0.7883583",
"0.78734857",
"0.7860402",
"0.785006",
"0.7848537",
"0.7848537",
"0.7848537",
"0.7848537",
"0.7848537"
] | 0.78552985 | 91 |